diff -pruN 0.26.4-3/.gitignore 0.34.0-1/.gitignore
--- 0.26.4-3/.gitignore	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/.gitignore	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+.tox/
+.*_cache
+__pycache__
+coverage.xml
diff -pruN 0.26.4-3/PKG-INFO 0.34.0-1/PKG-INFO
--- 0.26.4-3/PKG-INFO	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/PKG-INFO	2025-09-30 07:37:47.000000000 +0000
@@ -1,46 +1,51 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: datamodel-code-generator
-Version: 0.26.4
+Version: 0.34.0
 Summary: Datamodel Code Generator
-Home-page: https://github.com/koxudaxi/datamodel-code-generator
-License: MIT
-Author: Koudai Aono
-Author-email: koxudaxi@gmail.com
-Requires-Python: >=3.8,<4.0
+Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
+Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
+Author-email: Koudai Aono <koxudaxi@gmail.com>
+License-Expression: MIT
+License-File: LICENSE
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Natural Language :: English
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.9
+Requires-Dist: argcomplete<4,>=2.10.1
+Requires-Dist: black>=19.10b0
+Requires-Dist: genson<2,>=1.2.1
+Requires-Dist: inflect<8,>=4.1
+Requires-Dist: isort<7,>=4.3.21
+Requires-Dist: jinja2<4,>=2.10.1
+Requires-Dist: packaging
+Requires-Dist: pydantic>=1.5
+Requires-Dist: pyyaml>=6.0.1
+Requires-Dist: tomli<3,>=2.2.1; python_version <= '3.11'
+Provides-Extra: all
+Requires-Dist: graphql-core>=3.2.3; extra == 'all'
+Requires-Dist: httpx>=0.24.1; extra == 'all'
+Requires-Dist: openapi-spec-validator<0.7,>=0.2.8; extra == 'all'
+Requires-Dist: prance>=0.18.2; extra == 'all'
+Requires-Dist: pysnooper<2,>=0.4.1; extra == 'all'
+Requires-Dist: ruff>=0.9.10; extra == 'all'
 Provides-Extra: debug
+Requires-Dist: pysnooper<2,>=0.4.1; extra == 'debug'
 Provides-Extra: graphql
+Requires-Dist: graphql-core>=3.2.3; extra == 'graphql'
 Provides-Extra: http
+Requires-Dist: httpx>=0.24.1; extra == 'http'
+Provides-Extra: ruff
+Requires-Dist: ruff>=0.9.10; extra == 'ruff'
 Provides-Extra: validation
-Requires-Dist: PySnooper (>=0.4.1,<2.0.0) ; extra == "debug"
-Requires-Dist: argcomplete (>=1.10,<4.0)
-Requires-Dist: black (>=19.10b0)
-Requires-Dist: genson (>=1.2.1,<2.0)
-Requires-Dist: graphql-core (>=3.2.3,<4.0.0) ; extra == "graphql"
-Requires-Dist: httpx ; extra == "http"
-Requires-Dist: inflect (>=4.1.0,<6.0)
-Requires-Dist: isort (>=4.3.21,<6.0)
-Requires-Dist: jinja2 (>=2.10.1,<4.0)
-Requires-Dist: openapi-spec-validator (>=0.2.8,<0.7.0) ; extra == "validation"
-Requires-Dist: packaging
-Requires-Dist: prance (>=0.18.2) ; extra == "validation"
-Requires-Dist: pydantic[email] (>=1.10.0,!=2.0.0,!=2.0.1,<3.0,!=2.4.0) ; python_version >= "3.12" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.10.0,<3.0,!=2.4.0) ; python_version >= "3.11" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.5.1,<3.0,!=2.4.0) ; python_version < "3.10"
-Requires-Dist: pydantic[email] (>=1.9.0,<3.0,!=2.4.0) ; python_version >= "3.10" and python_version < "3.11"
-Requires-Dist: pyyaml (>=6.0.1)
-Requires-Dist: toml (>=0.10.0,<1.0.0) ; python_version < "3.11"
-Project-URL: Repository, https://github.com/koxudaxi/datamodel-code-generator
+Requires-Dist: openapi-spec-validator<0.7,>=0.2.8; extra == 'validation'
+Requires-Dist: prance>=0.18.2; extra == 'validation'
 Description-Content-Type: text/markdown
 
 # datamodel-code-generator
@@ -308,12 +313,6 @@ class Apis(BaseModel):
       <p>Astral</p>
     </a>
   </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/DataDog">
-      <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
-      <p>Datadog, Inc.</p>
-    </a>
-  </td>
   </tr>
 </table>
 
@@ -331,6 +330,8 @@ See the following linked projects for re
   - *[`Makefile`](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
 - [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
   - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
+- [cloudcoil/cloudcoil](https://github.com/cloudcoil/cloudcoil)
+  - *[Cloudcoil - Model generation](https://github.com/cloudcoil/cloudcoil#%EF%B8%8F-model-generation)
 - [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
   - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
 - [hashintel/hash](https://github.com/hashintel/hash)
@@ -399,6 +400,8 @@ Options:
                         "datetime.date,datetime.datetime"
   --custom-formatters CUSTOM_FORMATTERS
                         List of modules with custom formatter (delimited list input).
+  --formatters {black,isort,ruff-check,ruff-format} [{black,isort,ruff-check,ruff-format} ...]
+                        Formatters for output (default: [black, isort])
   --http-headers HTTP_HEADER [HTTP_HEADER ...]
                         Set headers in HTTP requests to the remote host. (example:
                         "Authorization: Basic dXNlcjpwYXNz")
@@ -417,6 +420,8 @@ Options:
 Typing customization:
   --base-class BASE_CLASS
                         Base Class (default: pydantic.BaseModel)
+  --disable-future-imports
+                        Disable __future__ imports
   --enum-field-as-literal {all,one}
                         Parse enum field as literal. all: all enum field type are Literal.
                         one: field type is Literal when an enum has only one possible value
@@ -480,8 +485,8 @@ Field customization:
                         Use schema description to populate field docstring
 
 Model customization:
-  --allow-extra-fields  Allow to pass extra fields, if this flag is not passed, extra fields
-                        are forbidden.
+  --allow-extra-fields  Deprecated: Allow passing extra fields. This flag is deprecated. Use
+                        `--extra-fields=allow` instead.
   --allow-population-by-field-name
                         Allow population by field name
   --class-name CLASS_NAME
@@ -496,6 +501,10 @@ Model customization:
                         Enable faux immutability
   --enable-version-header
                         Enable package version on file headers
+  --extra-fields {allow,ignore,forbid}
+                        Set the generated models to allow, forbid, or ignore extra fields.
+  --frozen-dataclasses  Generate frozen dataclasses (dataclass(frozen=True)). Only applies
+                        to dataclass output.
   --keep-model-order    Keep generated models'' order
   --keyword-only        Defined models as keyword only (for example
                         dataclass(kw_only=True)).
@@ -503,10 +512,12 @@ Model customization:
                         Choose Datetime class between AwareDatetime, NaiveDatetime or
                         datetime. Each output model has its default mapping (for example
                         pydantic: datetime, dataclass: str, ...)
+  --parent-scoped-naming
+                        Set name of models defined inline from the parent model
   --reuse-model         Reuse models on the field when a module has the model with the same
                         content
-  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
-                        target python version (default: 3.8)
+  --target-python-version {3.9,3.10,3.11,3.12,3.13}
+                        target python version
   --treat-dot-as-module
                         treat dotted module names as modules
   --use-exact-imports   import exact types instead of modules, for example: "from .foo
@@ -528,7 +539,12 @@ Template customization:
                         Custom template directory
   --encoding ENCODING   The encoding of input and output (default: utf-8)
   --extra-template-data EXTRA_TEMPLATE_DATA
-                        Extra template data
+                        Extra template data for output models. Input is supposed to be a
+                        json/yaml file. For OpenAPI and Jsonschema the keys are the spec
+                        path of the object, or the name of the object if you want to apply
+                        the template data to multiple objects with the same name. If you are
+                        using another input file type (e.g. GraphQL), the key is the name of
+                        the object. The value is a dictionary of the template data to add.
   --use-double-quotes   Model generated with double quotes. Single quotes or your black
                         config skip_string_normalization value will be used without this
                         option.
@@ -537,6 +553,9 @@ Template customization:
                         option (require black 20.8b0 or later)
 
 OpenAPI-only options:
+  --include-path-parameters
+                        Include path parameters in generated parameter models in addition to
+                        query parameters (Only OpenAPI)
   --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
                         Scopes of OpenAPI model generation (default: schemas)
   --strict-nullable     Treat default field as a non-nullable field (Only OpenAPI)
@@ -577,4 +596,3 @@ See `docs/development-contributing.md` f
 ## License
 
 datamodel-code-generator is released under the MIT License. http://www.opensource.org/licenses/mit-license
-
diff -pruN 0.26.4-3/README.md 0.34.0-1/README.md
--- 0.26.4-3/README.md	2024-12-15 17:25:57.703037000 +0000
+++ 0.34.0-1/README.md	2025-09-30 07:37:47.000000000 +0000
@@ -263,12 +263,6 @@ class Apis(BaseModel):
       <p>Astral</p>
     </a>
   </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/DataDog">
-      <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
-      <p>Datadog, Inc.</p>
-    </a>
-  </td>
   </tr>
 </table>
 
@@ -286,6 +280,8 @@ See the following linked projects for re
   - *[`Makefile`](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
 - [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
   - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
+- [cloudcoil/cloudcoil](https://github.com/cloudcoil/cloudcoil)
+  - *[Cloudcoil - Model generation](https://github.com/cloudcoil/cloudcoil#%EF%B8%8F-model-generation)
 - [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
   - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
 - [hashintel/hash](https://github.com/hashintel/hash)
@@ -354,6 +350,8 @@ Options:
                         "datetime.date,datetime.datetime"
   --custom-formatters CUSTOM_FORMATTERS
                         List of modules with custom formatter (delimited list input).
+  --formatters {black,isort,ruff-check,ruff-format} [{black,isort,ruff-check,ruff-format} ...]
+                        Formatters for output (default: [black, isort])
   --http-headers HTTP_HEADER [HTTP_HEADER ...]
                         Set headers in HTTP requests to the remote host. (example:
                         "Authorization: Basic dXNlcjpwYXNz")
@@ -372,6 +370,8 @@ Options:
 Typing customization:
   --base-class BASE_CLASS
                         Base Class (default: pydantic.BaseModel)
+  --disable-future-imports
+                        Disable __future__ imports
   --enum-field-as-literal {all,one}
                         Parse enum field as literal. all: all enum field type are Literal.
                         one: field type is Literal when an enum has only one possible value
@@ -435,8 +435,8 @@ Field customization:
                         Use schema description to populate field docstring
 
 Model customization:
-  --allow-extra-fields  Allow to pass extra fields, if this flag is not passed, extra fields
-                        are forbidden.
+  --allow-extra-fields  Deprecated: Allow passing extra fields. This flag is deprecated. Use
+                        `--extra-fields=allow` instead.
   --allow-population-by-field-name
                         Allow population by field name
   --class-name CLASS_NAME
@@ -451,6 +451,10 @@ Model customization:
                         Enable faux immutability
   --enable-version-header
                         Enable package version on file headers
+  --extra-fields {allow,ignore,forbid}
+                        Set the generated models to allow, forbid, or ignore extra fields.
+  --frozen-dataclasses  Generate frozen dataclasses (dataclass(frozen=True)). Only applies
+                        to dataclass output.
   --keep-model-order    Keep generated models'' order
   --keyword-only        Defined models as keyword only (for example
                         dataclass(kw_only=True)).
@@ -458,10 +462,12 @@ Model customization:
                         Choose Datetime class between AwareDatetime, NaiveDatetime or
                         datetime. Each output model has its default mapping (for example
                         pydantic: datetime, dataclass: str, ...)
+  --parent-scoped-naming
+                        Set name of models defined inline from the parent model
   --reuse-model         Reuse models on the field when a module has the model with the same
                         content
-  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
-                        target python version (default: 3.8)
+  --target-python-version {3.9,3.10,3.11,3.12,3.13}
+                        target python version
   --treat-dot-as-module
                         treat dotted module names as modules
   --use-exact-imports   import exact types instead of modules, for example: "from .foo
@@ -483,7 +489,12 @@ Template customization:
                         Custom template directory
   --encoding ENCODING   The encoding of input and output (default: utf-8)
   --extra-template-data EXTRA_TEMPLATE_DATA
-                        Extra template data
+                        Extra template data for output models. Input is supposed to be a
+                        json/yaml file. For OpenAPI and Jsonschema the keys are the spec
+                        path of the object, or the name of the object if you want to apply
+                        the template data to multiple objects with the same name. If you are
+                        using another input file type (e.g. GraphQL), the key is the name of
+                        the object. The value is a dictionary of the template data to add.
   --use-double-quotes   Model generated with double quotes. Single quotes or your black
                         config skip_string_normalization value will be used without this
                         option.
@@ -492,6 +503,9 @@ Template customization:
                         option (require black 20.8b0 or later)
 
 OpenAPI-only options:
+  --include-path-parameters
+                        Include path parameters in generated parameter models in addition to
+                        query parameters (Only OpenAPI)
   --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
                         Scopes of OpenAPI model generation (default: schemas)
   --strict-nullable     Treat default field as a non-nullable field (Only OpenAPI)
diff -pruN 0.26.4-3/datamodel_code_generator/__init__.py 0.34.0-1/datamodel_code_generator/__init__.py
--- 0.26.4-3/datamodel_code_generator/__init__.py	2024-12-15 17:25:57.703037000 +0000
+++ 0.34.0-1/datamodel_code_generator/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,587 +0,0 @@
-from __future__ import annotations
-
-import contextlib
-import os
-import sys
-from datetime import datetime, timezone
-from enum import Enum
-from pathlib import Path
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    TextIO,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-
-import yaml
-
-import datamodel_code_generator.pydantic_patch  # noqa: F401
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.parser.base import Parser
-from datamodel_code_generator.types import StrictTypes
-from datamodel_code_generator.util import SafeLoader  # type: ignore
-
-T = TypeVar('T')
-
-try:
-    import pysnooper
-
-    pysnooper.tracer.DISABLED = True
-except ImportError:  # pragma: no cover
-    pysnooper = None
-
-DEFAULT_BASE_CLASS: str = 'pydantic.BaseModel'
-
-
-def load_yaml(stream: Union[str, TextIO]) -> Any:
-    return yaml.load(stream, Loader=SafeLoader)
-
-
-def load_yaml_from_path(path: Path, encoding: str) -> Any:
-    with path.open(encoding=encoding) as f:
-        return load_yaml(f)
-
-
-if TYPE_CHECKING:
-
-    def get_version() -> str: ...
-
-else:
-
-    def get_version() -> str:
-        package = 'datamodel-code-generator'
-
-        from importlib.metadata import version
-
-        return version(package)
-
-
-def enable_debug_message() -> None:  # pragma: no cover
-    if not pysnooper:
-        raise Exception(
-            "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
-        )
-
-    pysnooper.tracer.DISABLED = False
-
-
-def snooper_to_methods(  # type: ignore
-    output=None,
-    watch=(),
-    watch_explode=(),
-    depth=1,
-    prefix='',
-    overwrite=False,
-    thread_info=False,
-    custom_repr=(),
-    max_variable_length=100,
-) -> Callable[..., Any]:
-    def inner(cls: Type[T]) -> Type[T]:
-        if not pysnooper:
-            return cls
-        import inspect
-
-        methods = inspect.getmembers(cls, predicate=inspect.isfunction)
-        for name, method in methods:
-            snooper_method = pysnooper.snoop(
-                output,
-                watch,
-                watch_explode,
-                depth,
-                prefix,
-                overwrite,
-                thread_info,
-                custom_repr,
-                max_variable_length,
-            )(method)
-            setattr(cls, name, snooper_method)
-        return cls
-
-    return inner
-
-
-@contextlib.contextmanager
-def chdir(path: Optional[Path]) -> Iterator[None]:
-    """Changes working directory and returns to previous on exit."""
-
-    if path is None:
-        yield
-    else:
-        prev_cwd = Path.cwd()
-        try:
-            os.chdir(path if path.is_dir() else path.parent)
-            yield
-        finally:
-            os.chdir(prev_cwd)
-
-
-def is_openapi(text: str) -> bool:
-    return 'openapi' in load_yaml(text)
-
-
-JSON_SCHEMA_URLS: Tuple[str, ...] = (
-    'http://json-schema.org/',
-    'https://json-schema.org/',
-)
-
-
-def is_schema(text: str) -> bool:
-    data = load_yaml(text)
-    if not isinstance(data, dict):
-        return False
-    schema = data.get('$schema')
-    if isinstance(schema, str) and any(
-        schema.startswith(u) for u in JSON_SCHEMA_URLS
-    ):  # pragma: no cover
-        return True
-    if isinstance(data.get('type'), str):
-        return True
-    if any(
-        isinstance(data.get(o), list)
-        for o in (
-            'allOf',
-            'anyOf',
-            'oneOf',
-        )
-    ):
-        return True
-    if isinstance(data.get('properties'), dict):
-        return True
-    return False
-
-
-class InputFileType(Enum):
-    Auto = 'auto'
-    OpenAPI = 'openapi'
-    JsonSchema = 'jsonschema'
-    Json = 'json'
-    Yaml = 'yaml'
-    Dict = 'dict'
-    CSV = 'csv'
-    GraphQL = 'graphql'
-
-
-RAW_DATA_TYPES: List[InputFileType] = [
-    InputFileType.Json,
-    InputFileType.Yaml,
-    InputFileType.Dict,
-    InputFileType.CSV,
-    InputFileType.GraphQL,
-]
-
-
-class DataModelType(Enum):
-    PydanticBaseModel = 'pydantic.BaseModel'
-    PydanticV2BaseModel = 'pydantic_v2.BaseModel'
-    DataclassesDataclass = 'dataclasses.dataclass'
-    TypingTypedDict = 'typing.TypedDict'
-    MsgspecStruct = 'msgspec.Struct'
-
-
-class OpenAPIScope(Enum):
-    Schemas = 'schemas'
-    Paths = 'paths'
-    Tags = 'tags'
-    Parameters = 'parameters'
-
-
-class GraphQLScope(Enum):
-    Schema = 'schema'
-
-
-class Error(Exception):
-    def __init__(self, message: str) -> None:
-        self.message: str = message
-
-    def __str__(self) -> str:
-        return self.message
-
-
-class InvalidClassNameError(Error):
-    def __init__(self, class_name: str) -> None:
-        self.class_name = class_name
-        message = f'title={repr(class_name)} is invalid class name.'
-        super().__init__(message=message)
-
-
-def get_first_file(path: Path) -> Path:  # pragma: no cover
-    if path.is_file():
-        return path
-    elif path.is_dir():
-        for child in path.rglob('*'):
-            if child.is_file():
-                return child
-    raise Error('File not found')
-
-
-def generate(
-    input_: Union[Path, str, ParseResult, Mapping[str, Any]],
-    *,
-    input_filename: Optional[str] = None,
-    input_file_type: InputFileType = InputFileType.Auto,
-    output: Optional[Path] = None,
-    output_model_type: DataModelType = DataModelType.PydanticBaseModel,
-    target_python_version: PythonVersion = PythonVersion.PY_38,
-    base_class: str = '',
-    additional_imports: Optional[List[str]] = None,
-    custom_template_dir: Optional[Path] = None,
-    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-    validation: bool = False,
-    field_constraints: bool = False,
-    snake_case_field: bool = False,
-    strip_default_none: bool = False,
-    aliases: Optional[Mapping[str, str]] = None,
-    disable_timestamp: bool = False,
-    enable_version_header: bool = False,
-    allow_population_by_field_name: bool = False,
-    allow_extra_fields: bool = False,
-    apply_default_values_for_required_fields: bool = False,
-    force_optional_for_required_fields: bool = False,
-    class_name: Optional[str] = None,
-    use_standard_collections: bool = False,
-    use_schema_description: bool = False,
-    use_field_description: bool = False,
-    use_default_kwarg: bool = False,
-    reuse_model: bool = False,
-    encoding: str = 'utf-8',
-    enum_field_as_literal: Optional[LiteralType] = None,
-    use_one_literal_as_default: bool = False,
-    set_default_enum_member: bool = False,
-    use_subclass_enum: bool = False,
-    strict_nullable: bool = False,
-    use_generic_container_types: bool = False,
-    enable_faux_immutability: bool = False,
-    disable_appending_item_suffix: bool = False,
-    strict_types: Optional[Sequence[StrictTypes]] = None,
-    empty_enum_field_name: Optional[str] = None,
-    custom_class_name_generator: Optional[Callable[[str], str]] = None,
-    field_extra_keys: Optional[Set[str]] = None,
-    field_include_all_keys: bool = False,
-    field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-    openapi_scopes: Optional[List[OpenAPIScope]] = None,
-    graphql_scopes: Optional[List[GraphQLScope]] = None,
-    wrap_string_literal: Optional[bool] = None,
-    use_title_as_name: bool = False,
-    use_operation_id_as_name: bool = False,
-    use_unique_items_as_set: bool = False,
-    http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-    http_ignore_tls: bool = False,
-    use_annotated: bool = False,
-    use_non_positive_negative_number_constrained_types: bool = False,
-    original_field_name_delimiter: Optional[str] = None,
-    use_double_quotes: bool = False,
-    use_union_operator: bool = False,
-    collapse_root_models: bool = False,
-    special_field_name_prefix: Optional[str] = None,
-    remove_special_field_name_prefix: bool = False,
-    capitalise_enum_members: bool = False,
-    keep_model_order: bool = False,
-    custom_file_header: Optional[str] = None,
-    custom_file_header_path: Optional[Path] = None,
-    custom_formatters: Optional[List[str]] = None,
-    custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-    use_pendulum: bool = False,
-    http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-    treat_dots_as_module: bool = False,
-    use_exact_imports: bool = False,
-    union_mode: Optional[UnionMode] = None,
-    output_datetime_class: Optional[DatetimeClassType] = None,
-    keyword_only: bool = False,
-    no_alias: bool = False,
-) -> None:
-    remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
-    if isinstance(input_, str):
-        input_text: Optional[str] = input_
-    elif isinstance(input_, ParseResult):
-        from datamodel_code_generator.http import get_body
-
-        input_text = remote_text_cache.get_or_put(
-            input_.geturl(),
-            default_factory=lambda url: get_body(
-                url, http_headers, http_ignore_tls, http_query_parameters
-            ),
-        )
-    else:
-        input_text = None
-
-    if isinstance(input_, Path) and not input_.is_absolute():
-        input_ = input_.expanduser().resolve()
-    if input_file_type == InputFileType.Auto:
-        try:
-            input_text_ = (
-                get_first_file(input_).read_text(encoding=encoding)
-                if isinstance(input_, Path)
-                else input_text
-            )
-            assert isinstance(input_text_, str)
-            input_file_type = infer_input_type(input_text_)
-            print(
-                inferred_message.format(input_file_type.value),
-                file=sys.stderr,
-            )
-        except:  # noqa
-            raise Error('Invalid file format')
-
-    kwargs: Dict[str, Any] = {}
-    if input_file_type == InputFileType.OpenAPI:
-        from datamodel_code_generator.parser.openapi import OpenAPIParser
-
-        parser_class: Type[Parser] = OpenAPIParser
-        kwargs['openapi_scopes'] = openapi_scopes
-    elif input_file_type == InputFileType.GraphQL:
-        from datamodel_code_generator.parser.graphql import GraphQLParser
-
-        parser_class: Type[Parser] = GraphQLParser
-    else:
-        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
-
-        parser_class = JsonSchemaParser
-
-        if input_file_type in RAW_DATA_TYPES:
-            import json
-
-            try:
-                if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
-                    raise Error(f'Input must be a file for {input_file_type}')
-                obj: Dict[Any, Any]
-                if input_file_type == InputFileType.CSV:
-                    import csv
-
-                    def get_header_and_first_line(csv_file: IO[str]) -> Dict[str, Any]:
-                        csv_reader = csv.DictReader(csv_file)
-                        return dict(zip(csv_reader.fieldnames, next(csv_reader)))  # type: ignore
-
-                    if isinstance(input_, Path):
-                        with input_.open(encoding=encoding) as f:
-                            obj = get_header_and_first_line(f)
-                    else:
-                        import io
-
-                        obj = get_header_and_first_line(io.StringIO(input_text))
-                elif input_file_type == InputFileType.Yaml:
-                    obj = load_yaml(
-                        input_.read_text(encoding=encoding)  # type: ignore
-                        if isinstance(input_, Path)
-                        else input_text
-                    )
-                elif input_file_type == InputFileType.Json:
-                    obj = json.loads(
-                        input_.read_text(encoding=encoding)  # type: ignore
-                        if isinstance(input_, Path)
-                        else input_text
-                    )
-                elif input_file_type == InputFileType.Dict:
-                    import ast
-
-                    # Input can be a dict object stored in a python file
-                    obj = (
-                        ast.literal_eval(
-                            input_.read_text(encoding=encoding)  # type: ignore
-                        )
-                        if isinstance(input_, Path)
-                        else input_
-                    )
-                else:  # pragma: no cover
-                    raise Error(f'Unsupported input file type: {input_file_type}')
-            except:  # noqa
-                raise Error('Invalid file format')
-
-            from genson import SchemaBuilder
-
-            builder = SchemaBuilder()
-            builder.add_object(obj)
-            input_text = json.dumps(builder.to_schema())
-
-    if isinstance(input_, ParseResult) and input_file_type not in RAW_DATA_TYPES:
-        input_text = None
-
-    if union_mode is not None:
-        if output_model_type == DataModelType.PydanticV2BaseModel:
-            default_field_extras = {'union_mode': union_mode}
-        else:  # pragma: no cover
-            raise Error('union_mode is only supported for pydantic_v2.BaseModel')
-    else:
-        default_field_extras = None
-
-    from datamodel_code_generator.model import get_data_model_types
-
-    data_model_types = get_data_model_types(
-        output_model_type, target_python_version, output_datetime_class
-    )
-    parser = parser_class(
-        source=input_text or input_,
-        data_model_type=data_model_types.data_model,
-        data_model_root_type=data_model_types.root_model,
-        data_model_field_type=data_model_types.field_model,
-        data_type_manager_type=data_model_types.data_type_manager,
-        base_class=base_class,
-        additional_imports=additional_imports,
-        custom_template_dir=custom_template_dir,
-        extra_template_data=extra_template_data,
-        target_python_version=target_python_version,
-        dump_resolve_reference_action=data_model_types.dump_resolve_reference_action,
-        validation=validation,
-        field_constraints=field_constraints,
-        snake_case_field=snake_case_field,
-        strip_default_none=strip_default_none,
-        aliases=aliases,
-        allow_population_by_field_name=allow_population_by_field_name,
-        allow_extra_fields=allow_extra_fields,
-        apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-        force_optional_for_required_fields=force_optional_for_required_fields,
-        class_name=class_name,
-        use_standard_collections=use_standard_collections,
-        base_path=input_.parent
-        if isinstance(input_, Path) and input_.is_file()
-        else None,
-        use_schema_description=use_schema_description,
-        use_field_description=use_field_description,
-        use_default_kwarg=use_default_kwarg,
-        reuse_model=reuse_model,
-        enum_field_as_literal=LiteralType.All
-        if output_model_type == DataModelType.TypingTypedDict
-        else enum_field_as_literal,
-        use_one_literal_as_default=use_one_literal_as_default,
-        set_default_enum_member=True
-        if output_model_type == DataModelType.DataclassesDataclass
-        else set_default_enum_member,
-        use_subclass_enum=use_subclass_enum,
-        strict_nullable=strict_nullable,
-        use_generic_container_types=use_generic_container_types,
-        enable_faux_immutability=enable_faux_immutability,
-        remote_text_cache=remote_text_cache,
-        disable_appending_item_suffix=disable_appending_item_suffix,
-        strict_types=strict_types,
-        empty_enum_field_name=empty_enum_field_name,
-        custom_class_name_generator=custom_class_name_generator,
-        field_extra_keys=field_extra_keys,
-        field_include_all_keys=field_include_all_keys,
-        field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-        wrap_string_literal=wrap_string_literal,
-        use_title_as_name=use_title_as_name,
-        use_operation_id_as_name=use_operation_id_as_name,
-        use_unique_items_as_set=use_unique_items_as_set,
-        http_headers=http_headers,
-        http_ignore_tls=http_ignore_tls,
-        use_annotated=use_annotated,
-        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-        original_field_name_delimiter=original_field_name_delimiter,
-        use_double_quotes=use_double_quotes,
-        use_union_operator=use_union_operator,
-        collapse_root_models=collapse_root_models,
-        special_field_name_prefix=special_field_name_prefix,
-        remove_special_field_name_prefix=remove_special_field_name_prefix,
-        capitalise_enum_members=capitalise_enum_members,
-        keep_model_order=keep_model_order,
-        known_third_party=data_model_types.known_third_party,
-        custom_formatters=custom_formatters,
-        custom_formatters_kwargs=custom_formatters_kwargs,
-        use_pendulum=use_pendulum,
-        http_query_parameters=http_query_parameters,
-        treat_dots_as_module=treat_dots_as_module,
-        use_exact_imports=use_exact_imports,
-        default_field_extras=default_field_extras,
-        target_datetime_class=output_datetime_class,
-        keyword_only=keyword_only,
-        no_alias=no_alias,
-        **kwargs,
-    )
-
-    with chdir(output):
-        results = parser.parse()
-    if not input_filename:  # pragma: no cover
-        if isinstance(input_, str):
-            input_filename = '<stdin>'
-        elif isinstance(input_, ParseResult):
-            input_filename = input_.geturl()
-        elif input_file_type == InputFileType.Dict:
-            # input_ might be a dict object provided directly, and missing a name field
-            input_filename = getattr(input_, 'name', '<dict>')
-        else:
-            input_filename = input_.name
-    if not results:
-        raise Error('Models not found in the input data')
-    elif isinstance(results, str):
-        modules = {output: (results, input_filename)}
-    else:
-        if output is None:
-            raise Error('Modular references require an output directory')
-        if output.suffix:
-            raise Error('Modular references require an output directory, not a file')
-        modules = {
-            output.joinpath(*name): (
-                result.body,
-                str(result.source.as_posix() if result.source else input_filename),
-            )
-            for name, result in sorted(results.items())
-        }
-
-    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
-
-    if custom_file_header is None and custom_file_header_path:
-        custom_file_header = custom_file_header_path.read_text(encoding=encoding)
-
-    header = """\
-# generated by datamodel-codegen:
-#   filename:  {}"""
-    if not disable_timestamp:
-        header += f'\n#   timestamp: {timestamp}'
-    if enable_version_header:
-        header += f'\n#   version:   {get_version()}'
-
-    file: Optional[IO[Any]]
-    for path, (body, filename) in modules.items():
-        if path is None:
-            file = None
-        else:
-            if not path.parent.exists():
-                path.parent.mkdir(parents=True)
-            file = path.open('wt', encoding=encoding)
-
-        print(custom_file_header or header.format(filename), file=file)
-        if body:
-            print('', file=file)
-            print(body.rstrip(), file=file)
-
-        if file is not None:
-            file.close()
-
-
-def infer_input_type(text: str) -> InputFileType:
-    if is_openapi(text):
-        return InputFileType.OpenAPI
-    elif is_schema(text):
-        return InputFileType.JsonSchema
-    return InputFileType.Json
-
-
-inferred_message = (
-    'The input file type was determined to be: {}\nThis can be specified explicitly with the '
-    '`--input-file-type` option.'
-)
-
-__all__ = [
-    'DefaultPutDict',
-    'Error',
-    'InputFileType',
-    'InvalidClassNameError',
-    'LiteralType',
-    'PythonVersion',
-    'generate',
-]
diff -pruN 0.26.4-3/datamodel_code_generator/__main__.py 0.34.0-1/datamodel_code_generator/__main__.py
--- 0.26.4-3/datamodel_code_generator/__main__.py	2024-12-15 17:25:57.703037000 +0000
+++ 0.34.0-1/datamodel_code_generator/__main__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,568 +0,0 @@
-#! /usr/bin/env python
-
-"""
-Main function.
-"""
-
-from __future__ import annotations
-
-import json
-import signal
-import sys
-import warnings
-from collections import defaultdict
-from enum import IntEnum
-from io import TextIOBase
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Union,
-    cast,
-)
-from urllib.parse import ParseResult, urlparse
-
-import argcomplete
-import black
-from pydantic import BaseModel
-
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-
-if TYPE_CHECKING:
-    from argparse import Namespace
-
-    from typing_extensions import Self
-
-from datamodel_code_generator import (
-    DataModelType,
-    Error,
-    InputFileType,
-    InvalidClassNameError,
-    OpenAPIScope,
-    enable_debug_message,
-    generate,
-)
-from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
-from datamodel_code_generator.format import (
-    DatetimeClassType,
-    PythonVersion,
-    black_find_project_root,
-    is_supported_in_black,
-)
-from datamodel_code_generator.parser import LiteralType
-from datamodel_code_generator.reference import is_url
-from datamodel_code_generator.types import StrictTypes
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    Model,
-    field_validator,
-    load_toml,
-    model_validator,
-)
-
-
-class Exit(IntEnum):
-    """Exit reasons."""
-
-    OK = 0
-    ERROR = 1
-    KeyboardInterrupt = 2
-
-
-def sig_int_handler(_: int, __: Any) -> None:  # pragma: no cover
-    exit(Exit.OK)
-
-
-signal.signal(signal.SIGINT, sig_int_handler)
-
-
-class Config(BaseModel):
-    if PYDANTIC_V2:
-        model_config = ConfigDict(arbitrary_types_allowed=True)
-
-        def get(self, item: str) -> Any:
-            return getattr(self, item)
-
-        def __getitem__(self, item: str) -> Any:
-            return self.get(item)
-
-        if TYPE_CHECKING:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]: ...
-
-        else:
-
-            @classmethod
-            def parse_obj(cls: type[Model], obj: Any) -> Model:
-                return cls.model_validate(obj)
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.model_fields
-
-    else:
-
-        class Config:
-            # validate_assignment = True
-            # Pydantic 1.5.1 doesn't support validate_assignment correctly
-            arbitrary_types_allowed = (TextIOBase,)
-
-        if not TYPE_CHECKING:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.__fields__
-
-    @field_validator(
-        'aliases', 'extra_template_data', 'custom_formatters_kwargs', mode='before'
-    )
-    def validate_file(cls, value: Any) -> Optional[TextIOBase]:
-        if value is None or isinstance(value, TextIOBase):
-            return value
-        return cast(TextIOBase, Path(value).expanduser().resolve().open('rt'))
-
-    @field_validator(
-        'input',
-        'output',
-        'custom_template_dir',
-        'custom_file_header_path',
-        mode='before',
-    )
-    def validate_path(cls, value: Any) -> Optional[Path]:
-        if value is None or isinstance(value, Path):
-            return value  # pragma: no cover
-        return Path(value).expanduser().resolve()
-
-    @field_validator('url', mode='before')
-    def validate_url(cls, value: Any) -> Optional[ParseResult]:
-        if isinstance(value, str) and is_url(value):  # pragma: no cover
-            return urlparse(value)
-        elif value is None:  # pragma: no cover
-            return None
-        raise Error(
-            f"This protocol doesn't support only http/https. --input={value}"
-        )  # pragma: no cover
-
-    @model_validator(mode='after')
-    def validate_use_generic_container_types(
-        cls, values: Dict[str, Any]
-    ) -> Dict[str, Any]:
-        if values.get('use_generic_container_types'):
-            target_python_version: PythonVersion = values['target_python_version']
-            if target_python_version == target_python_version.PY_36:
-                raise Error(
-                    f'`--use-generic-container-types` can not be used with `--target-python-version` {target_python_version.PY_36.value}.\n'
-                    ' The version will be not supported in a future version'
-                )
-        return values
-
-    @model_validator(mode='after')
-    def validate_original_field_name_delimiter(
-        cls, values: Dict[str, Any]
-    ) -> Dict[str, Any]:
-        if values.get('original_field_name_delimiter') is not None:
-            if not values.get('snake_case_field'):
-                raise Error(
-                    '`--original-field-name-delimiter` can not be used without `--snake-case-field`.'
-                )
-        return values
-
-    @model_validator(mode='after')
-    def validate_custom_file_header(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('custom_file_header') and values.get('custom_file_header_path'):
-            raise Error(
-                '`--custom_file_header_path` can not be used with `--custom_file_header`.'
-            )  # pragma: no cover
-        return values
-
-    @model_validator(mode='after')
-    def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        output_model_type: DataModelType = values.get('output_model_type')
-        python_target: PythonVersion = values.get('target_python_version')
-        if (
-            values.get('keyword_only')
-            and output_model_type == DataModelType.DataclassesDataclass
-            and not python_target.has_kw_only_dataclass
-        ):
-            raise Error(
-                f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
-            )
-        return values
-
-    @model_validator(mode='after')
-    def validate_output_datetime_class(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        datetime_class_type: Optional[DatetimeClassType] = values.get(
-            'output_datetime_class'
-        )
-        if (
-            datetime_class_type
-            and datetime_class_type is not DatetimeClassType.Datetime
-            and values.get('output_model_type') == DataModelType.DataclassesDataclass
-        ):
-            raise Error(
-                '`--output-datetime-class` only allows "datetime" for '
-                f'`--output-model-type` {DataModelType.DataclassesDataclass.value}'
-            )
-        return values
-
-    # Pydantic 1.5.1 doesn't support each_item=True correctly
-    @field_validator('http_headers', mode='before')
-    def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
-        def validate_each_item(each_item: Any) -> Tuple[str, str]:
-            if isinstance(each_item, str):  # pragma: no cover
-                try:
-                    field_name, field_value = each_item.split(':', maxsplit=1)  # type: str, str
-                    return field_name, field_value.lstrip()
-                except ValueError:
-                    raise Error(f'Invalid http header: {each_item!r}')
-            return each_item  # pragma: no cover
-
-        if isinstance(value, list):
-            return [validate_each_item(each_item) for each_item in value]
-        return value  # pragma: no cover
-
-    @field_validator('http_query_parameters', mode='before')
-    def validate_http_query_parameters(
-        cls, value: Any
-    ) -> Optional[List[Tuple[str, str]]]:
-        def validate_each_item(each_item: Any) -> Tuple[str, str]:
-            if isinstance(each_item, str):  # pragma: no cover
-                try:
-                    field_name, field_value = each_item.split('=', maxsplit=1)  # type: str, str
-                    return field_name, field_value.lstrip()
-                except ValueError:
-                    raise Error(f'Invalid http query parameter: {each_item!r}')
-            return each_item  # pragma: no cover
-
-        if isinstance(value, list):
-            return [validate_each_item(each_item) for each_item in value]
-        return value  # pragma: no cover
-
-    @model_validator(mode='before')
-    def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('additional_imports') is not None:
-            values['additional_imports'] = values.get('additional_imports').split(',')
-        return values
-
-    @model_validator(mode='before')
-    def validate_custom_formatters(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('custom_formatters') is not None:
-            values['custom_formatters'] = values.get('custom_formatters').split(',')
-        return values
-
-    if PYDANTIC_V2:
-
-        @model_validator(mode='after')  # type: ignore
-        def validate_root(self: Self) -> Self:
-            if self.use_annotated:
-                self.field_constraints = True
-            return self
-
-    else:
-
-        @model_validator(mode='after')
-        def validate_root(cls, values: Any) -> Any:
-            if values.get('use_annotated'):
-                values['field_constraints'] = True
-            return values
-
-    input: Optional[Union[Path, str]] = None
-    input_file_type: InputFileType = InputFileType.Auto
-    output_model_type: DataModelType = DataModelType.PydanticBaseModel
-    output: Optional[Path] = None
-    debug: bool = False
-    disable_warnings: bool = False
-    target_python_version: PythonVersion = PythonVersion.PY_38
-    base_class: str = ''
-    additional_imports: Optional[List[str]] = (None,)
-    custom_template_dir: Optional[Path] = None
-    extra_template_data: Optional[TextIOBase] = None
-    validation: bool = False
-    field_constraints: bool = False
-    snake_case_field: bool = False
-    strip_default_none: bool = False
-    aliases: Optional[TextIOBase] = None
-    disable_timestamp: bool = False
-    enable_version_header: bool = False
-    allow_population_by_field_name: bool = False
-    allow_extra_fields: bool = False
-    use_default: bool = False
-    force_optional: bool = False
-    class_name: Optional[str] = None
-    use_standard_collections: bool = False
-    use_schema_description: bool = False
-    use_field_description: bool = False
-    use_default_kwarg: bool = False
-    reuse_model: bool = False
-    encoding: str = DEFAULT_ENCODING
-    enum_field_as_literal: Optional[LiteralType] = None
-    use_one_literal_as_default: bool = False
-    set_default_enum_member: bool = False
-    use_subclass_enum: bool = False
-    strict_nullable: bool = False
-    use_generic_container_types: bool = False
-    use_union_operator: bool = False
-    enable_faux_immutability: bool = False
-    url: Optional[ParseResult] = None
-    disable_appending_item_suffix: bool = False
-    strict_types: List[StrictTypes] = []
-    empty_enum_field_name: Optional[str] = None
-    field_extra_keys: Optional[Set[str]] = None
-    field_include_all_keys: bool = False
-    field_extra_keys_without_x_prefix: Optional[Set[str]] = None
-    openapi_scopes: Optional[List[OpenAPIScope]] = [OpenAPIScope.Schemas]
-    wrap_string_literal: Optional[bool] = None
-    use_title_as_name: bool = False
-    use_operation_id_as_name: bool = False
-    use_unique_items_as_set: bool = False
-    http_headers: Optional[Sequence[Tuple[str, str]]] = None
-    http_ignore_tls: bool = False
-    use_annotated: bool = False
-    use_non_positive_negative_number_constrained_types: bool = False
-    original_field_name_delimiter: Optional[str] = None
-    use_double_quotes: bool = False
-    collapse_root_models: bool = False
-    special_field_name_prefix: Optional[str] = None
-    remove_special_field_name_prefix: bool = False
-    capitalise_enum_members: bool = False
-    keep_model_order: bool = False
-    custom_file_header: Optional[str] = None
-    custom_file_header_path: Optional[Path] = None
-    custom_formatters: Optional[List[str]] = None
-    custom_formatters_kwargs: Optional[TextIOBase] = None
-    use_pendulum: bool = False
-    http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
-    treat_dot_as_module: bool = False
-    use_exact_imports: bool = False
-    union_mode: Optional[UnionMode] = None
-    output_datetime_class: Optional[DatetimeClassType] = None
-    keyword_only: bool = False
-    no_alias: bool = False
-
-    def merge_args(self, args: Namespace) -> None:
-        set_args = {
-            f: getattr(args, f)
-            for f in self.get_fields()
-            if getattr(args, f) is not None
-        }
-
-        if set_args.get('output_model_type') == DataModelType.MsgspecStruct.value:
-            set_args['use_annotated'] = True
-
-        if set_args.get('use_annotated'):
-            set_args['field_constraints'] = True
-
-        parsed_args = Config.parse_obj(set_args)
-        for field_name in set_args:
-            setattr(self, field_name, getattr(parsed_args, field_name))
-
-
-def main(args: Optional[Sequence[str]] = None) -> Exit:
-    """Main function."""
-
-    # add cli completion support
-    argcomplete.autocomplete(arg_parser)
-
-    if args is None:  # pragma: no cover
-        args = sys.argv[1:]
-
-    arg_parser.parse_args(args, namespace=namespace)
-
-    if namespace.version:
-        from datamodel_code_generator.version import version
-
-        print(version)
-        exit(0)
-
-    root = black_find_project_root((Path().resolve(),))
-    pyproject_toml_path = root / 'pyproject.toml'
-    if pyproject_toml_path.is_file():
-        pyproject_toml: Dict[str, Any] = {
-            k.replace('-', '_'): v
-            for k, v in load_toml(pyproject_toml_path)
-            .get('tool', {})
-            .get('datamodel-codegen', {})
-            .items()
-        }
-    else:
-        pyproject_toml = {}
-
-    try:
-        config = Config.parse_obj(pyproject_toml)
-        config.merge_args(namespace)
-    except Error as e:
-        print(e.message, file=sys.stderr)
-        return Exit.ERROR
-
-    if not config.input and not config.url and sys.stdin.isatty():
-        print(
-            'Not Found Input: require `stdin` or arguments `--input` or `--url`',
-            file=sys.stderr,
-        )
-        arg_parser.print_help()
-        return Exit.ERROR
-
-    if not is_supported_in_black(config.target_python_version):  # pragma: no cover
-        print(
-            f"Installed black doesn't support Python version {config.target_python_version.value}.\n"  # type: ignore
-            f'You have to install a newer black.\n'
-            f'Installed black version: {black.__version__}',
-            file=sys.stderr,
-        )
-        return Exit.ERROR
-
-    if config.debug:  # pragma: no cover
-        enable_debug_message()
-
-    if config.disable_warnings:
-        warnings.simplefilter('ignore')
-    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
-    if config.extra_template_data is None:
-        extra_template_data = None
-    else:
-        with config.extra_template_data as data:
-            try:
-                extra_template_data = json.load(
-                    data, object_hook=lambda d: defaultdict(dict, **d)
-                )
-            except json.JSONDecodeError as e:
-                print(f'Unable to load extra template data: {e}', file=sys.stderr)
-                return Exit.ERROR
-
-    if config.aliases is None:
-        aliases = None
-    else:
-        with config.aliases as data:
-            try:
-                aliases = json.load(data)
-            except json.JSONDecodeError as e:
-                print(f'Unable to load alias mapping: {e}', file=sys.stderr)
-                return Exit.ERROR
-        if not isinstance(aliases, dict) or not all(
-            isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
-        ):
-            print(
-                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
-                file=sys.stderr,
-            )
-            return Exit.ERROR
-
-    if config.custom_formatters_kwargs is None:
-        custom_formatters_kwargs = None
-    else:
-        with config.custom_formatters_kwargs as data:
-            try:
-                custom_formatters_kwargs = json.load(data)
-            except json.JSONDecodeError as e:  # pragma: no cover
-                print(
-                    f'Unable to load custom_formatters_kwargs mapping: {e}',
-                    file=sys.stderr,
-                )
-                return Exit.ERROR
-        if not isinstance(custom_formatters_kwargs, dict) or not all(
-            isinstance(k, str) and isinstance(v, str)
-            for k, v in custom_formatters_kwargs.items()
-        ):  # pragma: no cover
-            print(
-                'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
-                file=sys.stderr,
-            )
-            return Exit.ERROR
-
-    try:
-        generate(
-            input_=config.url or config.input or sys.stdin.read(),
-            input_file_type=config.input_file_type,
-            output=config.output,
-            output_model_type=config.output_model_type,
-            target_python_version=config.target_python_version,
-            base_class=config.base_class,
-            additional_imports=config.additional_imports,
-            custom_template_dir=config.custom_template_dir,
-            validation=config.validation,
-            field_constraints=config.field_constraints,
-            snake_case_field=config.snake_case_field,
-            strip_default_none=config.strip_default_none,
-            extra_template_data=extra_template_data,
-            aliases=aliases,
-            disable_timestamp=config.disable_timestamp,
-            enable_version_header=config.enable_version_header,
-            allow_population_by_field_name=config.allow_population_by_field_name,
-            allow_extra_fields=config.allow_extra_fields,
-            apply_default_values_for_required_fields=config.use_default,
-            force_optional_for_required_fields=config.force_optional,
-            class_name=config.class_name,
-            use_standard_collections=config.use_standard_collections,
-            use_schema_description=config.use_schema_description,
-            use_field_description=config.use_field_description,
-            use_default_kwarg=config.use_default_kwarg,
-            reuse_model=config.reuse_model,
-            encoding=config.encoding,
-            enum_field_as_literal=config.enum_field_as_literal,
-            use_one_literal_as_default=config.use_one_literal_as_default,
-            set_default_enum_member=config.set_default_enum_member,
-            use_subclass_enum=config.use_subclass_enum,
-            strict_nullable=config.strict_nullable,
-            use_generic_container_types=config.use_generic_container_types,
-            enable_faux_immutability=config.enable_faux_immutability,
-            disable_appending_item_suffix=config.disable_appending_item_suffix,
-            strict_types=config.strict_types,
-            empty_enum_field_name=config.empty_enum_field_name,
-            field_extra_keys=config.field_extra_keys,
-            field_include_all_keys=config.field_include_all_keys,
-            field_extra_keys_without_x_prefix=config.field_extra_keys_without_x_prefix,
-            openapi_scopes=config.openapi_scopes,
-            wrap_string_literal=config.wrap_string_literal,
-            use_title_as_name=config.use_title_as_name,
-            use_operation_id_as_name=config.use_operation_id_as_name,
-            use_unique_items_as_set=config.use_unique_items_as_set,
-            http_headers=config.http_headers,
-            http_ignore_tls=config.http_ignore_tls,
-            use_annotated=config.use_annotated,
-            use_non_positive_negative_number_constrained_types=config.use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=config.original_field_name_delimiter,
-            use_double_quotes=config.use_double_quotes,
-            collapse_root_models=config.collapse_root_models,
-            use_union_operator=config.use_union_operator,
-            special_field_name_prefix=config.special_field_name_prefix,
-            remove_special_field_name_prefix=config.remove_special_field_name_prefix,
-            capitalise_enum_members=config.capitalise_enum_members,
-            keep_model_order=config.keep_model_order,
-            custom_file_header=config.custom_file_header,
-            custom_file_header_path=config.custom_file_header_path,
-            custom_formatters=config.custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=config.use_pendulum,
-            http_query_parameters=config.http_query_parameters,
-            treat_dots_as_module=config.treat_dot_as_module,
-            use_exact_imports=config.use_exact_imports,
-            union_mode=config.union_mode,
-            output_datetime_class=config.output_datetime_class,
-            keyword_only=config.keyword_only,
-            no_alias=config.no_alias,
-        )
-        return Exit.OK
-    except InvalidClassNameError as e:
-        print(f'{e} You have to set `--class-name` option', file=sys.stderr)
-        return Exit.ERROR
-    except Error as e:
-        print(str(e), file=sys.stderr)
-        return Exit.ERROR
-    except Exception:
-        import traceback
-
-        print(traceback.format_exc(), file=sys.stderr)
-        return Exit.ERROR
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff -pruN 0.26.4-3/datamodel_code_generator/arguments.py 0.34.0-1/datamodel_code_generator/arguments.py
--- 0.26.4-3/datamodel_code_generator/arguments.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/arguments.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,526 +0,0 @@
-from __future__ import annotations
-
-import locale
-from argparse import ArgumentParser, FileType, HelpFormatter, Namespace
-from operator import attrgetter
-from typing import TYPE_CHECKING
-
-from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-from datamodel_code_generator.parser import LiteralType
-from datamodel_code_generator.types import StrictTypes
-
-if TYPE_CHECKING:
-    from argparse import Action
-    from typing import Iterable, Optional
-
-DEFAULT_ENCODING = locale.getpreferredencoding()
-
-namespace = Namespace(no_color=False)
-
-
-class SortingHelpFormatter(HelpFormatter):
-    def _bold_cyan(self, text: str) -> str:
-        return f'\x1b[36;1m{text}\x1b[0m'
-
-    def add_arguments(self, actions: Iterable[Action]) -> None:
-        actions = sorted(actions, key=attrgetter('option_strings'))
-        super().add_arguments(actions)
-
-    def start_section(self, heading: Optional[str]) -> None:
-        return super().start_section(
-            heading if namespace.no_color or not heading else self._bold_cyan(heading)
-        )
-
-
-arg_parser = ArgumentParser(
-    usage='\n  datamodel-codegen [options]',
-    description='Generate Python data models from schema definitions or structured data',
-    formatter_class=SortingHelpFormatter,
-    add_help=False,
-)
-
-base_options = arg_parser.add_argument_group('Options')
-typing_options = arg_parser.add_argument_group('Typing customization')
-field_options = arg_parser.add_argument_group('Field customization')
-model_options = arg_parser.add_argument_group('Model customization')
-template_options = arg_parser.add_argument_group('Template customization')
-openapi_options = arg_parser.add_argument_group('OpenAPI-only options')
-general_options = arg_parser.add_argument_group('General options')
-
-# ======================================================================================
-# Base options for input/output
-# ======================================================================================
-base_options.add_argument(
-    '--http-headers',
-    nargs='+',
-    metavar='HTTP_HEADER',
-    help='Set headers in HTTP requests to the remote host. (example: "Authorization: Basic dXNlcjpwYXNz")',
-)
-base_options.add_argument(
-    '--http-query-parameters',
-    nargs='+',
-    metavar='HTTP_QUERY_PARAMETERS',
-    help='Set query parameters in HTTP requests to the remote host. (example: "ref=branch")',
-)
-base_options.add_argument(
-    '--http-ignore-tls',
-    help="Disable verification of the remote host's TLS certificate",
-    action='store_true',
-    default=None,
-)
-base_options.add_argument(
-    '--input',
-    help='Input file/directory (default: stdin)',
-)
-base_options.add_argument(
-    '--input-file-type',
-    help='Input file type (default: auto)',
-    choices=[i.value for i in InputFileType],
-)
-base_options.add_argument(
-    '--output',
-    help='Output file (default: stdout)',
-)
-base_options.add_argument(
-    '--output-model-type',
-    help='Output model type (default: pydantic.BaseModel)',
-    choices=[i.value for i in DataModelType],
-)
-base_options.add_argument(
-    '--url',
-    help='Input file URL. `--input` is ignored when `--url` is used',
-)
-
-# ======================================================================================
-# Customization options for generated models
-# ======================================================================================
-model_options.add_argument(
-    '--allow-extra-fields',
-    help='Allow to pass extra fields, if this flag is not passed, extra fields are forbidden.',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--allow-population-by-field-name',
-    help='Allow population by field name',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--class-name',
-    help='Set class name of root model',
-    default=None,
-)
-model_options.add_argument(
-    '--collapse-root-models',
-    action='store_true',
-    default=None,
-    help='Models generated with a root-type field will be merged '
-    'into the models using that root-type model',
-)
-model_options.add_argument(
-    '--disable-appending-item-suffix',
-    help='Disable appending `Item` suffix to model name in an array',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--disable-timestamp',
-    help='Disable timestamp on file headers',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--enable-faux-immutability',
-    help='Enable faux immutability',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--enable-version-header',
-    help='Enable package version on file headers',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--keep-model-order',
-    help="Keep generated models' order",
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--keyword-only',
-    help='Defined models as keyword only (for example dataclass(kw_only=True)).',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--reuse-model',
-    help='Reuse models on the field when a module has the model with the same content',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--target-python-version',
-    help='target python version (default: 3.8)',
-    choices=[v.value for v in PythonVersion],
-)
-model_options.add_argument(
-    '--treat-dot-as-module',
-    help='treat dotted module names as modules',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--use-schema-description',
-    help='Use schema description to populate class docstring',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--use-title-as-name',
-    help='use titles as class names of models',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--use-pendulum',
-    help='use pendulum instead of datetime',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--use-exact-imports',
-    help='import exact types instead of modules, for example: "from .foo import Bar" instead of '
-    '"from . import foo" with "foo.Bar"',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--output-datetime-class',
-    help='Choose Datetime class between AwareDatetime, NaiveDatetime or datetime. '
-    'Each output model has its default mapping (for example pydantic: datetime, dataclass: str, ...)',
-    choices=[i.value for i in DatetimeClassType],
-    default=None,
-)
-
-# ======================================================================================
-# Typing options for generated models
-# ======================================================================================
-typing_options.add_argument(
-    '--base-class',
-    help='Base Class (default: pydantic.BaseModel)',
-    type=str,
-)
-typing_options.add_argument(
-    '--enum-field-as-literal',
-    help='Parse enum field as literal. '
-    'all: all enum field type are Literal. '
-    'one: field type is Literal when an enum has only one possible value',
-    choices=[lt.value for lt in LiteralType],
-    default=None,
-)
-typing_options.add_argument(
-    '--field-constraints',
-    help='Use field constraints and not con* annotations',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--set-default-enum-member',
-    help='Set enum members as default values for enum field',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--strict-types',
-    help='Use strict types',
-    choices=[t.value for t in StrictTypes],
-    nargs='+',
-)
-typing_options.add_argument(
-    '--use-annotated',
-    help='Use typing.Annotated for Field(). Also, `--field-constraints` option will be enabled.',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-generic-container-types',
-    help='Use generic container types for type hinting (typing.Sequence, typing.Mapping). '
-    'If `--use-standard-collections` option is set, then import from collections.abc instead of typing',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-non-positive-negative-number-constrained-types',
-    help='Use the Non{Positive,Negative}{FloatInt} types instead of the corresponding con* constrained types.',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-one-literal-as-default',
-    help='Use one literal as default value for one literal field',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-standard-collections',
-    help='Use standard collections for type hinting (list, dict)',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-subclass-enum',
-    help='Define Enum class as subclass with field type when enum has type (int, float, bytes, str)',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-union-operator',
-    help='Use | operator for Union type (PEP 604).',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-unique-items-as-set',
-    help='define field type as `set` when the field attribute has `uniqueItems`',
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# Customization options for generated model fields
-# ======================================================================================
-field_options.add_argument(
-    '--capitalise-enum-members',
-    '--capitalize-enum-members',
-    help='Capitalize field names on enum',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--empty-enum-field-name',
-    help='Set field name when enum value is empty (default:  `_`)',
-    default=None,
-)
-field_options.add_argument(
-    '--field-extra-keys',
-    help='Add extra keys to field parameters',
-    type=str,
-    nargs='+',
-)
-field_options.add_argument(
-    '--field-extra-keys-without-x-prefix',
-    help='Add extra keys with `x-` prefix to field parameters. The extra keys are stripped of the `x-` prefix.',
-    type=str,
-    nargs='+',
-)
-field_options.add_argument(
-    '--field-include-all-keys',
-    help='Add all keys to field parameters',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--force-optional',
-    help='Force optional for required fields',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--original-field-name-delimiter',
-    help='Set delimiter to convert to snake case. This option only can be used with --snake-case-field (default: `_` )',
-    default=None,
-)
-field_options.add_argument(
-    '--remove-special-field-name-prefix',
-    help='Remove field name prefix if it has a special meaning e.g. underscores',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--snake-case-field',
-    help='Change camel-case field name to snake-case',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--special-field-name-prefix',
-    help="Set field name prefix when first character can't be used as Python field name (default:  `field`)",
-    default=None,
-)
-field_options.add_argument(
-    '--strip-default-none',
-    help='Strip default None on fields',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--use-default',
-    help='Use default value even if a field is required',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--use-default-kwarg',
-    action='store_true',
-    help='Use `default=` instead of a positional argument for Fields that have default values.',
-    default=None,
-)
-field_options.add_argument(
-    '--use-field-description',
-    help='Use schema description to populate field docstring',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--union-mode',
-    help='Union mode for only pydantic v2 field',
-    choices=[u.value for u in UnionMode],
-    default=None,
-)
-field_options.add_argument(
-    '--no-alias',
-    help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an 
-            alias_generator""",
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# Options for templating output
-# ======================================================================================
-template_options.add_argument(
-    '--aliases',
-    help='Alias mapping file',
-    type=FileType('rt'),
-)
-template_options.add_argument(
-    '--custom-file-header',
-    help='Custom file header',
-    type=str,
-    default=None,
-)
-template_options.add_argument(
-    '--custom-file-header-path',
-    help='Custom file header file path',
-    default=None,
-    type=str,
-)
-template_options.add_argument(
-    '--custom-template-dir',
-    help='Custom template directory',
-    type=str,
-)
-template_options.add_argument(
-    '--encoding',
-    help=f'The encoding of input and output (default: {DEFAULT_ENCODING})',
-    default=None,
-)
-template_options.add_argument(
-    '--extra-template-data',
-    help='Extra template data',
-    type=FileType('rt'),
-)
-template_options.add_argument(
-    '--use-double-quotes',
-    action='store_true',
-    default=None,
-    help='Model generated with double quotes. Single quotes or '
-    'your black config skip_string_normalization value will be used without this option.',
-)
-template_options.add_argument(
-    '--wrap-string-literal',
-    help='Wrap string literal by using black `experimental-string-processing` option (require black 20.8b0 or later)',
-    action='store_true',
-    default=None,
-)
-base_options.add_argument(
-    '--additional-imports',
-    help='Custom imports for output (delimited list input). For example "datetime.date,datetime.datetime"',
-    type=str,
-    default=None,
-)
-base_options.add_argument(
-    '--custom-formatters',
-    help='List of modules with custom formatter (delimited list input).',
-    type=str,
-    default=None,
-)
-template_options.add_argument(
-    '--custom-formatters-kwargs',
-    help='A file with kwargs for custom formatters.',
-    type=FileType('rt'),
-)
-
-# ======================================================================================
-# Options specific to OpenAPI input schemas
-# ======================================================================================
-openapi_options.add_argument(
-    '--openapi-scopes',
-    help='Scopes of OpenAPI model generation (default: schemas)',
-    choices=[o.value for o in OpenAPIScope],
-    nargs='+',
-    default=None,
-)
-openapi_options.add_argument(
-    '--strict-nullable',
-    help='Treat default field as a non-nullable field (Only OpenAPI)',
-    action='store_true',
-    default=None,
-)
-openapi_options.add_argument(
-    '--use-operation-id-as-name',
-    help='use operation id of OpenAPI as class names of models',
-    action='store_true',
-    default=None,
-)
-openapi_options.add_argument(
-    '--validation',
-    help='Deprecated: Enable validation (Only OpenAPI). this option is deprecated. it will be removed in future '
-    'releases',
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# General options
-# ======================================================================================
-general_options.add_argument(
-    '--debug',
-    help='show debug message (require "debug". `$ pip install \'datamodel-code-generator[debug]\'`)',
-    action='store_true',
-    default=None,
-)
-general_options.add_argument(
-    '--disable-warnings',
-    help='disable warnings',
-    action='store_true',
-    default=None,
-)
-general_options.add_argument(
-    '-h',
-    '--help',
-    action='help',
-    default='==SUPPRESS==',
-    help='show this help message and exit',
-)
-general_options.add_argument(
-    '--no-color',
-    action='store_true',
-    default=False,
-    help='disable colorized output',
-)
-general_options.add_argument(
-    '--version',
-    action='store_true',
-    help='show version',
-)
-
-__all__ = [
-    'arg_parser',
-    'DEFAULT_ENCODING',
-    'namespace',
-]
diff -pruN 0.26.4-3/datamodel_code_generator/format.py 0.34.0-1/datamodel_code_generator/format.py
--- 0.26.4-3/datamodel_code_generator/format.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/format.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,277 +0,0 @@
-from __future__ import annotations
-
-from enum import Enum
-from importlib import import_module
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
-from warnings import warn
-
-import black
-import isort
-
-from datamodel_code_generator.util import cached_property, load_toml
-
-try:
-    import black.mode
-except ImportError:  # pragma: no cover
-    black.mode = None
-
-
-class DatetimeClassType(Enum):
-    Datetime = 'datetime'
-    Awaredatetime = 'AwareDatetime'
-    Naivedatetime = 'NaiveDatetime'
-
-
-class PythonVersion(Enum):
-    PY_36 = '3.6'
-    PY_37 = '3.7'
-    PY_38 = '3.8'
-    PY_39 = '3.9'
-    PY_310 = '3.10'
-    PY_311 = '3.11'
-    PY_312 = '3.12'
-    PY_313 = '3.13'
-
-    @cached_property
-    def _is_py_38_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value}  # type: ignore
-
-    @cached_property
-    def _is_py_39_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value}  # type: ignore
-
-    @cached_property
-    def _is_py_310_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {
-            self.PY_36.value,
-            self.PY_37.value,
-            self.PY_38.value,
-            self.PY_39.value,
-        }  # type: ignore
-
-    @cached_property
-    def _is_py_311_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {
-            self.PY_36.value,
-            self.PY_37.value,
-            self.PY_38.value,
-            self.PY_39.value,
-            self.PY_310.value,
-        }  # type: ignore
-
-    @property
-    def has_literal_type(self) -> bool:
-        return self._is_py_38_or_later
-
-    @property
-    def has_union_operator(self) -> bool:  # pragma: no cover
-        return self._is_py_310_or_later
-
-    @property
-    def has_annotated_type(self) -> bool:
-        return self._is_py_39_or_later
-
-    @property
-    def has_typed_dict(self) -> bool:
-        return self._is_py_38_or_later
-
-    @property
-    def has_typed_dict_non_required(self) -> bool:
-        return self._is_py_311_or_later
-
-    @property
-    def has_kw_only_dataclass(self) -> bool:
-        return self._is_py_310_or_later
-
-
-if TYPE_CHECKING:
-
-    class _TargetVersion(Enum): ...
-
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, _TargetVersion]
-else:
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, black.TargetVersion] = {
-        v: getattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
-        for v in PythonVersion
-        if hasattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
-    }
-
-
-def is_supported_in_black(python_version: PythonVersion) -> bool:  # pragma: no cover
-    return python_version in BLACK_PYTHON_VERSION
-
-
-def black_find_project_root(sources: Sequence[Path]) -> Path:
-    if TYPE_CHECKING:
-        from typing import Iterable, Tuple, Union
-
-        def _find_project_root(
-            srcs: Union[Sequence[str], Iterable[str]],
-        ) -> Union[Tuple[Path, str], Path]: ...
-
-    else:
-        from black import find_project_root as _find_project_root
-    project_root = _find_project_root(tuple(str(s) for s in sources))
-    if isinstance(project_root, tuple):
-        return project_root[0]
-    else:  # pragma: no cover
-        return project_root
-
-
-class CodeFormatter:
-    def __init__(
-        self,
-        python_version: PythonVersion,
-        settings_path: Optional[Path] = None,
-        wrap_string_literal: Optional[bool] = None,
-        skip_string_normalization: bool = True,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        if not settings_path:
-            settings_path = Path().resolve()
-
-        root = black_find_project_root((settings_path,))
-        path = root / 'pyproject.toml'
-        if path.is_file():
-            pyproject_toml = load_toml(path)
-            config = pyproject_toml.get('tool', {}).get('black', {})
-        else:
-            config = {}
-
-        black_kwargs: Dict[str, Any] = {}
-        if wrap_string_literal is not None:
-            experimental_string_processing = wrap_string_literal
-        else:
-            if black.__version__ < '24.1.0':  # type: ignore
-                experimental_string_processing = config.get(
-                    'experimental-string-processing'
-                )
-            else:
-                experimental_string_processing = config.get(
-                    'preview', False
-                ) and (  # pragma: no cover
-                    config.get('unstable', False)
-                    or 'string_processing' in config.get('enable-unstable-feature', [])
-                )
-
-        if experimental_string_processing is not None:  # pragma: no cover
-            if black.__version__.startswith('19.'):  # type: ignore
-                warn(
-                    f"black doesn't support `experimental-string-processing` option"  # type: ignore
-                    f' for wrapping string literal in {black.__version__}'
-                )
-            elif black.__version__ < '24.1.0':  # type: ignore
-                black_kwargs['experimental_string_processing'] = (
-                    experimental_string_processing
-                )
-            elif experimental_string_processing:
-                black_kwargs['preview'] = True
-                black_kwargs['unstable'] = config.get('unstable', False)
-                black_kwargs['enabled_features'] = {
-                    black.mode.Preview.string_processing
-                }
-
-        if TYPE_CHECKING:
-            self.black_mode: black.FileMode
-        else:
-            self.black_mode = black.FileMode(
-                target_versions={BLACK_PYTHON_VERSION[python_version]},
-                line_length=config.get('line-length', black.DEFAULT_LINE_LENGTH),
-                string_normalization=not skip_string_normalization
-                or not config.get('skip-string-normalization', True),
-                **black_kwargs,
-            )
-
-        self.settings_path: str = str(settings_path)
-
-        self.isort_config_kwargs: Dict[str, Any] = {}
-        if known_third_party:
-            self.isort_config_kwargs['known_third_party'] = known_third_party
-
-        if isort.__version__.startswith('4.'):
-            self.isort_config = None
-        else:
-            self.isort_config = isort.Config(
-                settings_path=self.settings_path, **self.isort_config_kwargs
-            )
-
-        self.custom_formatters_kwargs = custom_formatters_kwargs or {}
-        self.custom_formatters = self._check_custom_formatters(custom_formatters)
-
-    def _load_custom_formatter(
-        self, custom_formatter_import: str
-    ) -> CustomCodeFormatter:
-        import_ = import_module(custom_formatter_import)
-
-        if not hasattr(import_, 'CodeFormatter'):
-            raise NameError(
-                f'Custom formatter module `{import_.__name__}` must contains object with name Formatter'
-            )
-
-        formatter_class = import_.__getattribute__('CodeFormatter')
-
-        if not issubclass(formatter_class, CustomCodeFormatter):
-            raise TypeError(
-                f'The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`'
-            )
-
-        return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
-
-    def _check_custom_formatters(
-        self, custom_formatters: Optional[List[str]]
-    ) -> List[CustomCodeFormatter]:
-        if custom_formatters is None:
-            return []
-
-        return [
-            self._load_custom_formatter(custom_formatter_import)
-            for custom_formatter_import in custom_formatters
-        ]
-
-    def format_code(
-        self,
-        code: str,
-    ) -> str:
-        code = self.apply_isort(code)
-        code = self.apply_black(code)
-
-        for formatter in self.custom_formatters:
-            code = formatter.apply(code)
-
-        return code
-
-    def apply_black(self, code: str) -> str:
-        return black.format_str(
-            code,
-            mode=self.black_mode,
-        )
-
-    if TYPE_CHECKING:
-
-        def apply_isort(self, code: str) -> str: ...
-
-    else:
-        if isort.__version__.startswith('4.'):
-
-            def apply_isort(self, code: str) -> str:
-                return isort.SortImports(
-                    file_contents=code,
-                    settings_path=self.settings_path,
-                    **self.isort_config_kwargs,
-                ).output
-
-        else:
-
-            def apply_isort(self, code: str) -> str:
-                return isort.code(code, config=self.isort_config)
-
-
-class CustomCodeFormatter:
-    def __init__(self, formatter_kwargs: Dict[str, Any]) -> None:
-        self.formatter_kwargs = formatter_kwargs
-
-    def apply(self, code: str) -> str:
-        raise NotImplementedError
diff -pruN 0.26.4-3/datamodel_code_generator/http.py 0.34.0-1/datamodel_code_generator/http.py
--- 0.26.4-3/datamodel_code_generator/http.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/http.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional, Sequence, Tuple
-
-try:
-    import httpx
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[http]`' to resolve URL Reference"
-    )
-
-
-def get_body(
-    url: str,
-    headers: Optional[Sequence[Tuple[str, str]]] = None,
-    ignore_tls: bool = False,
-    query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-) -> str:
-    return httpx.get(
-        url,
-        headers=headers,
-        verify=not ignore_tls,
-        follow_redirects=True,
-        params=query_parameters,
-    ).text
-
-
-def join_url(url: str, ref: str = '.') -> str:
-    return str(httpx.URL(url).join(ref))
diff -pruN 0.26.4-3/datamodel_code_generator/imports.py 0.34.0-1/datamodel_code_generator/imports.py
--- 0.26.4-3/datamodel_code_generator/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,127 +0,0 @@
-from __future__ import annotations
-
-from collections import defaultdict
-from functools import lru_cache
-from typing import DefaultDict, Dict, Iterable, List, Optional, Set, Tuple, Union
-
-from datamodel_code_generator.util import BaseModel
-
-
-class Import(BaseModel):
-    from_: Optional[str] = None
-    import_: str
-    alias: Optional[str] = None
-    reference_path: Optional[str] = None
-
-    @classmethod
-    @lru_cache()
-    def from_full_path(cls, class_path: str) -> Import:
-        split_class_path: List[str] = class_path.split('.')
-        return Import(
-            from_='.'.join(split_class_path[:-1]) or None, import_=split_class_path[-1]
-        )
-
-
-class Imports(DefaultDict[Optional[str], Set[str]]):
-    def __str__(self) -> str:
-        return self.dump()
-
-    def __init__(self, use_exact: bool = False) -> None:
-        super().__init__(set)
-        self.alias: DefaultDict[Optional[str], Dict[str, str]] = defaultdict(dict)
-        self.counter: Dict[Tuple[Optional[str], str], int] = defaultdict(int)
-        self.reference_paths: Dict[str, Import] = {}
-        self.use_exact: bool = use_exact
-
-    def _set_alias(self, from_: Optional[str], imports: Set[str]) -> List[str]:
-        return [
-            f'{i} as {self.alias[from_][i]}'
-            if i in self.alias[from_] and i != self.alias[from_][i]
-            else i
-            for i in sorted(imports)
-        ]
-
-    def create_line(self, from_: Optional[str], imports: Set[str]) -> str:
-        if from_:
-            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
-        return '\n'.join(f'import {i}' for i in self._set_alias(from_, imports))
-
-    def dump(self) -> str:
-        return '\n'.join(
-            self.create_line(from_, imports) for from_, imports in self.items()
-        )
-
-    def append(self, imports: Union[Import, Iterable[Import], None]) -> None:
-        if imports:
-            if isinstance(imports, Import):
-                imports = [imports]
-            for import_ in imports:
-                if import_.reference_path:
-                    self.reference_paths[import_.reference_path] = import_
-                if '.' in import_.import_:
-                    self[None].add(import_.import_)
-                    self.counter[(None, import_.import_)] += 1
-                else:
-                    self[import_.from_].add(import_.import_)
-                    self.counter[(import_.from_, import_.import_)] += 1
-                    if import_.alias:
-                        self.alias[import_.from_][import_.import_] = import_.alias
-
-    def remove(self, imports: Union[Import, Iterable[Import]]) -> None:
-        if isinstance(imports, Import):  # pragma: no cover
-            imports = [imports]
-        for import_ in imports:
-            if '.' in import_.import_:  # pragma: no cover
-                self.counter[(None, import_.import_)] -= 1
-                if self.counter[(None, import_.import_)] == 0:  # pragma: no cover
-                    self[None].remove(import_.import_)
-                    if not self[None]:
-                        del self[None]
-            else:
-                self.counter[(import_.from_, import_.import_)] -= 1  # pragma: no cover
-                if (
-                    self.counter[(import_.from_, import_.import_)] == 0
-                ):  # pragma: no cover
-                    self[import_.from_].remove(import_.import_)
-                    if not self[import_.from_]:
-                        del self[import_.from_]
-                    if import_.alias:  # pragma: no cover
-                        del self.alias[import_.from_][import_.import_]
-                        if not self.alias[import_.from_]:
-                            del self.alias[import_.from_]
-
-    def remove_referenced_imports(self, reference_path: str) -> None:
-        if reference_path in self.reference_paths:
-            self.remove(self.reference_paths[reference_path])
-
-
-IMPORT_ANNOTATED = Import.from_full_path('typing.Annotated')
-IMPORT_ANNOTATED_BACKPORT = Import.from_full_path('typing_extensions.Annotated')
-IMPORT_ANY = Import.from_full_path('typing.Any')
-IMPORT_LIST = Import.from_full_path('typing.List')
-IMPORT_SET = Import.from_full_path('typing.Set')
-IMPORT_UNION = Import.from_full_path('typing.Union')
-IMPORT_OPTIONAL = Import.from_full_path('typing.Optional')
-IMPORT_LITERAL = Import.from_full_path('typing.Literal')
-IMPORT_TYPE_ALIAS = Import.from_full_path('typing.TypeAlias')
-IMPORT_LITERAL_BACKPORT = Import.from_full_path('typing_extensions.Literal')
-IMPORT_SEQUENCE = Import.from_full_path('typing.Sequence')
-IMPORT_FROZEN_SET = Import.from_full_path('typing.FrozenSet')
-IMPORT_MAPPING = Import.from_full_path('typing.Mapping')
-IMPORT_ABC_SEQUENCE = Import.from_full_path('collections.abc.Sequence')
-IMPORT_ABC_SET = Import.from_full_path('collections.abc.Set')
-IMPORT_ABC_MAPPING = Import.from_full_path('collections.abc.Mapping')
-IMPORT_ENUM = Import.from_full_path('enum.Enum')
-IMPORT_ANNOTATIONS = Import.from_full_path('__future__.annotations')
-IMPORT_DICT = Import.from_full_path('typing.Dict')
-IMPORT_DECIMAL = Import.from_full_path('decimal.Decimal')
-IMPORT_DATE = Import.from_full_path('datetime.date')
-IMPORT_DATETIME = Import.from_full_path('datetime.datetime')
-IMPORT_TIMEDELTA = Import.from_full_path('datetime.timedelta')
-IMPORT_PATH = Import.from_full_path('pathlib.Path')
-IMPORT_TIME = Import.from_full_path('datetime.time')
-IMPORT_UUID = Import.from_full_path('uuid.UUID')
-IMPORT_PENDULUM_DATE = Import.from_full_path('pendulum.Date')
-IMPORT_PENDULUM_DATETIME = Import.from_full_path('pendulum.DateTime')
-IMPORT_PENDULUM_DURATION = Import.from_full_path('pendulum.Duration')
-IMPORT_PENDULUM_TIME = Import.from_full_path('pendulum.Time')
diff -pruN 0.26.4-3/datamodel_code_generator/model/__init__.py 0.34.0-1/datamodel_code_generator/model/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,91 +0,0 @@
-from __future__ import annotations
-
-import sys
-from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type
-
-from .. import DatetimeClassType, PythonVersion
-from ..types import DataTypeManager as DataTypeManagerABC
-from .base import ConstraintsBase, DataModel, DataModelFieldBase
-
-if TYPE_CHECKING:
-    from .. import DataModelType
-
-DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
-DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
-    f'{sys.version_info.major}.{sys.version_info.minor}'
-)
-
-
-class DataModelSet(NamedTuple):
-    data_model: Type[DataModel]
-    root_model: Type[DataModel]
-    field_model: Type[DataModelFieldBase]
-    data_type_manager: Type[DataTypeManagerABC]
-    dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]]
-    known_third_party: Optional[List[str]] = None
-
-
-def get_data_model_types(
-    data_model_type: DataModelType,
-    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
-    target_datetime_class: DatetimeClassType = DEFAULT_TARGET_DATETIME_CLASS,
-) -> DataModelSet:
-    from .. import DataModelType
-    from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
-    from .types import DataTypeManager
-
-    if data_model_type == DataModelType.PydanticBaseModel:
-        return DataModelSet(
-            data_model=pydantic.BaseModel,
-            root_model=pydantic.CustomRootType,
-            field_model=pydantic.DataModelField,
-            data_type_manager=pydantic.DataTypeManager,
-            dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
-        )
-    elif data_model_type == DataModelType.PydanticV2BaseModel:
-        return DataModelSet(
-            data_model=pydantic_v2.BaseModel,
-            root_model=pydantic_v2.RootModel,
-            field_model=pydantic_v2.DataModelField,
-            data_type_manager=pydantic_v2.DataTypeManager,
-            dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
-        )
-    elif data_model_type == DataModelType.DataclassesDataclass:
-        return DataModelSet(
-            data_model=dataclass.DataClass,
-            root_model=rootmodel.RootModel,
-            field_model=dataclass.DataModelField,
-            data_type_manager=dataclass.DataTypeManager,
-            dump_resolve_reference_action=None,
-        )
-    elif data_model_type == DataModelType.TypingTypedDict:
-        return DataModelSet(
-            data_model=(
-                typed_dict.TypedDict
-                if target_python_version.has_typed_dict
-                else typed_dict.TypedDictBackport
-            ),
-            root_model=rootmodel.RootModel,
-            field_model=(
-                typed_dict.DataModelField
-                if target_python_version.has_typed_dict_non_required
-                else typed_dict.DataModelFieldBackport
-            ),
-            data_type_manager=DataTypeManager,
-            dump_resolve_reference_action=None,
-        )
-    elif data_model_type == DataModelType.MsgspecStruct:
-        return DataModelSet(
-            data_model=msgspec.Struct,
-            root_model=msgspec.RootModel,
-            field_model=msgspec.DataModelField,
-            data_type_manager=msgspec.DataTypeManager,
-            dump_resolve_reference_action=None,
-            known_third_party=['msgspec'],
-        )
-    raise ValueError(
-        f'{data_model_type} is unsupported data model type'
-    )  # pragma: no cover
-
-
-__all__ = ['ConstraintsBase', 'DataModel', 'DataModelFieldBase']
diff -pruN 0.26.4-3/datamodel_code_generator/model/base.py 0.34.0-1/datamodel_code_generator/model/base.py
--- 0.26.4-3/datamodel_code_generator/model/base.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/base.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,468 +0,0 @@
-from abc import ABC, abstractmethod
-from collections import defaultdict
-from copy import deepcopy
-from functools import lru_cache
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    FrozenSet,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
-from warnings import warn
-
-from jinja2 import Environment, FileSystemLoader, Template
-from pydantic import Field
-
-from datamodel_code_generator.imports import (
-    IMPORT_ANNOTATED,
-    IMPORT_ANNOTATED_BACKPORT,
-    IMPORT_OPTIONAL,
-    IMPORT_UNION,
-    Import,
-)
-from datamodel_code_generator.reference import Reference, _BaseModel
-from datamodel_code_generator.types import (
-    ANY,
-    NONE,
-    UNION_PREFIX,
-    DataType,
-    Nullable,
-    chain_as_tuple,
-    get_optional_type,
-)
-from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, cached_property
-
-TEMPLATE_DIR: Path = Path(__file__).parents[0] / 'template'
-
-ALL_MODEL: str = '#all#'
-
-ConstraintsBaseT = TypeVar('ConstraintsBaseT', bound='ConstraintsBase')
-
-
-class ConstraintsBase(_BaseModel):
-    unique_items: Optional[bool] = Field(None, alias='uniqueItems')
-    _exclude_fields: ClassVar[Set[str]] = {'has_constraints'}
-    if PYDANTIC_V2:
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True, ignored_types=(cached_property,)
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-
-    @cached_property
-    def has_constraints(self) -> bool:
-        return any(v is not None for v in self.dict().values())
-
-    @staticmethod
-    def merge_constraints(
-        a: ConstraintsBaseT, b: ConstraintsBaseT
-    ) -> Optional[ConstraintsBaseT]:
-        constraints_class = None
-        if isinstance(a, ConstraintsBase):  # pragma: no cover
-            root_type_field_constraints = {
-                k: v for k, v in a.dict(by_alias=True).items() if v is not None
-            }
-            constraints_class = a.__class__
-        else:
-            root_type_field_constraints = {}  # pragma: no cover
-
-        if isinstance(b, ConstraintsBase):  # pragma: no cover
-            model_field_constraints = {
-                k: v for k, v in b.dict(by_alias=True).items() if v is not None
-            }
-            constraints_class = constraints_class or b.__class__
-        else:
-            model_field_constraints = {}
-
-        if not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
-            return None
-
-        return constraints_class.parse_obj(
-            {
-                **root_type_field_constraints,
-                **model_field_constraints,
-            }
-        )
-
-
-class DataModelFieldBase(_BaseModel):
-    name: Optional[str] = None
-    default: Optional[Any] = None
-    required: bool = False
-    alias: Optional[str] = None
-    data_type: DataType
-    constraints: Any = None
-    strip_default_none: bool = False
-    nullable: Optional[bool] = None
-    parent: Optional[Any] = None
-    extras: Dict[str, Any] = {}
-    use_annotated: bool = False
-    has_default: bool = False
-    use_field_description: bool = False
-    const: bool = False
-    original_name: Optional[str] = None
-    use_default_kwarg: bool = False
-    use_one_literal_as_default: bool = False
-    _exclude_fields: ClassVar[Set[str]] = {'parent'}
-    _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
-    can_have_extra_keys: ClassVar[bool] = True
-    type_has_null: Optional[bool] = None
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **data: Any) -> None:
-            super().__init__(**data)
-            if self.data_type.reference or self.data_type.data_types:
-                self.data_type.parent = self
-            self.process_const()
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.default = self.extras['const']
-        self.const = True
-        self.required = False
-        self.nullable = False
-
-    @property
-    def type_hint(self) -> str:
-        type_hint = self.data_type.type_hint
-
-        if not type_hint:
-            return NONE
-        elif self.has_default_factory:
-            return type_hint
-        elif self.data_type.is_optional and self.data_type.type != ANY:
-            return type_hint
-        elif self.nullable is not None:
-            if self.nullable:
-                return get_optional_type(type_hint, self.data_type.use_union_operator)
-            return type_hint
-        elif self.required:
-            if self.type_has_null:
-                return get_optional_type(type_hint, self.data_type.use_union_operator)
-            return type_hint
-        elif self.fall_back_to_nullable:
-            return get_optional_type(type_hint, self.data_type.use_union_operator)
-        else:
-            return type_hint
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        type_hint = self.type_hint
-        has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint
-        imports: List[Union[Tuple[Import], Iterator[Import]]] = [
-            (
-                i
-                for i in self.data_type.all_imports
-                if not (not has_union and i == IMPORT_UNION)
-            )
-        ]
-
-        if self.fall_back_to_nullable:
-            if (
-                self.nullable or (self.nullable is None and not self.required)
-            ) and not self.data_type.use_union_operator:
-                imports.append((IMPORT_OPTIONAL,))
-        else:
-            if (
-                self.nullable and not self.data_type.use_union_operator
-            ):  # pragma: no cover
-                imports.append((IMPORT_OPTIONAL,))
-        if self.use_annotated and self.annotated:
-            import_annotated = (
-                IMPORT_ANNOTATED
-                if self.data_type.python_version.has_annotated_type
-                else IMPORT_ANNOTATED_BACKPORT
-            )
-            imports.append((import_annotated,))
-        return chain_as_tuple(*imports)
-
-    @property
-    def docstring(self) -> Optional[str]:
-        if self.use_field_description:
-            description = self.extras.get('description', None)
-            if description is not None:
-                return f'{description}'
-        return None
-
-    @property
-    def unresolved_types(self) -> FrozenSet[str]:
-        return self.data_type.unresolved_types
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        return None
-
-    @property
-    def method(self) -> Optional[str]:
-        return None
-
-    @property
-    def represented_default(self) -> str:
-        return repr(self.default)
-
-    @property
-    def annotated(self) -> Optional[str]:
-        return None
-
-    @property
-    def has_default_factory(self) -> bool:
-        return 'default_factory' in self.extras
-
-    @property
-    def fall_back_to_nullable(self) -> bool:
-        return True
-
-
-@lru_cache()
-def get_template(template_file_path: Path) -> Template:
-    loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-    environment: Environment = Environment(loader=loader)
-    return environment.get_template(template_file_path.name)
-
-
-def get_module_path(name: str, file_path: Optional[Path]) -> List[str]:
-    if file_path:
-        return [
-            *file_path.parts[:-1],
-            file_path.stem,
-            *name.split('.')[:-1],
-        ]
-    return name.split('.')[:-1]
-
-
-def get_module_name(name: str, file_path: Optional[Path]) -> str:
-    return '.'.join(get_module_path(name, file_path))
-
-
-class TemplateBase(ABC):
-    @property
-    @abstractmethod
-    def template_file_path(self) -> Path:
-        raise NotImplementedError
-
-    @cached_property
-    def template(self) -> Template:
-        return get_template(self.template_file_path)
-
-    @abstractmethod
-    def render(self) -> str:
-        raise NotImplementedError
-
-    def _render(self, *args: Any, **kwargs: Any) -> str:
-        return self.template.render(*args, **kwargs)
-
-    def __str__(self) -> str:
-        return self.render()
-
-
-class BaseClassDataType(DataType): ...
-
-
-UNDEFINED: Any = object()
-
-
-class DataModel(TemplateBase, Nullable, ABC):
-    TEMPLATE_FILE_PATH: ClassVar[str] = ''
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        self.keyword_only = keyword_only
-        if not self.TEMPLATE_FILE_PATH:
-            raise Exception('TEMPLATE_FILE_PATH is undefined')
-
-        self._custom_template_dir: Optional[Path] = custom_template_dir
-        self.decorators: List[str] = decorators or []
-        self._additional_imports: List[Import] = []
-        self.custom_base_class = custom_base_class
-        if base_classes:
-            self.base_classes: List[BaseClassDataType] = [
-                BaseClassDataType(reference=b) for b in base_classes
-            ]
-        else:
-            self.set_base_class()
-
-        self.file_path: Optional[Path] = path
-        self.reference: Reference = reference
-
-        self.reference.source = self
-
-        self.extra_template_data = (
-            # The supplied defaultdict will either create a new entry,
-            # or already contain a predefined entry for this type
-            extra_template_data[self.name]
-            if extra_template_data is not None
-            else defaultdict(dict)
-        )
-
-        self.fields = self._validate_fields(fields) if fields else []
-
-        for base_class in self.base_classes:
-            if base_class.reference:
-                base_class.reference.children.append(self)
-
-        if extra_template_data is not None:
-            all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
-            if all_model_extra_template_data:
-                # The deepcopy is needed here to ensure that different models don't
-                # end up inadvertently sharing state (such as "base_class_kwargs")
-                self.extra_template_data.update(deepcopy(all_model_extra_template_data))
-
-        self.methods: List[str] = methods or []
-
-        self.description = description
-        for field in self.fields:
-            field.parent = self
-
-        self._additional_imports.extend(self.DEFAULT_IMPORTS)
-        self.default: Any = default
-        self._nullable: bool = nullable
-
-    def _validate_fields(
-        self, fields: List[DataModelFieldBase]
-    ) -> List[DataModelFieldBase]:
-        names: Set[str] = set()
-        unique_fields: List[DataModelFieldBase] = []
-        for field in fields:
-            if field.name:
-                if field.name in names:
-                    warn(f'Field name `{field.name}` is duplicated on {self.name}')
-                    continue
-                else:
-                    names.add(field.name)
-            unique_fields.append(field)
-        return unique_fields
-
-    def set_base_class(self) -> None:
-        base_class = self.custom_base_class or self.BASE_CLASS
-        if not base_class:
-            self.base_classes = []
-            return None
-        base_class_import = Import.from_full_path(base_class)
-        self._additional_imports.append(base_class_import)
-        self.base_classes = [BaseClassDataType.from_import(base_class_import)]
-
-    @cached_property
-    def template_file_path(self) -> Path:
-        template_file_path = Path(self.TEMPLATE_FILE_PATH)
-        if self._custom_template_dir is not None:
-            custom_template_file_path = self._custom_template_dir / template_file_path
-            if custom_template_file_path.exists():
-                return custom_template_file_path
-        return template_file_path
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return chain_as_tuple(
-            (i for f in self.fields for i in f.imports),
-            self._additional_imports,
-        )
-
-    @property
-    def reference_classes(self) -> FrozenSet[str]:
-        return frozenset(
-            {r.reference.path for r in self.base_classes if r.reference}
-            | {t for f in self.fields for t in f.unresolved_types}
-        )
-
-    @property
-    def name(self) -> str:
-        return self.reference.name
-
-    @property
-    def duplicate_name(self) -> str:
-        return self.reference.duplicate_name or ''
-
-    @property
-    def base_class(self) -> str:
-        return ', '.join(b.type_hint for b in self.base_classes)
-
-    @staticmethod
-    def _get_class_name(name: str) -> str:
-        if '.' in name:
-            return name.rsplit('.', 1)[-1]
-        return name
-
-    @property
-    def class_name(self) -> str:
-        return self._get_class_name(self.name)
-
-    @class_name.setter
-    def class_name(self, class_name: str) -> None:
-        if '.' in self.reference.name:
-            self.reference.name = (
-                f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
-            )
-        else:
-            self.reference.name = class_name
-
-    @property
-    def duplicate_class_name(self) -> str:
-        return self._get_class_name(self.duplicate_name)
-
-    @property
-    def module_path(self) -> List[str]:
-        return get_module_path(self.name, self.file_path)
-
-    @property
-    def module_name(self) -> str:
-        return get_module_name(self.name, self.file_path)
-
-    @property
-    def all_data_types(self) -> Iterator[DataType]:
-        for field in self.fields:
-            yield from field.data_type.all_data_types
-        yield from self.base_classes
-
-    @property
-    def nullable(self) -> bool:
-        return self._nullable
-
-    @cached_property
-    def path(self) -> str:
-        return self.reference.path
-
-    def render(self, *, class_name: Optional[str] = None) -> str:
-        response = self._render(
-            class_name=class_name or self.class_name,
-            fields=self.fields,
-            decorators=self.decorators,
-            base_class=self.base_class,
-            methods=self.methods,
-            description=self.description,
-            keyword_only=self.keyword_only,
-            **self.extra_template_data,
-        )
-        return response
diff -pruN 0.26.4-3/datamodel_code_generator/model/dataclass.py 0.34.0-1/datamodel_code_generator/model/dataclass.py
--- 0.26.4-3/datamodel_code_generator/model/dataclass.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/dataclass.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,181 +0,0 @@
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-)
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    Import,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
-from datamodel_code_generator.model.pydantic.base_model import Constraints
-from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple
-
-
-def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return bool(field.field) or not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
-    )
-
-
-class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=sorted(fields, key=_has_field_assignment, reverse=False),
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-
-class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default_factory',
-        'init',
-        'repr',
-        'hash',
-        'compare',
-        'metadata',
-        'kw_only',
-    }
-    constraints: Optional[Constraints] = None
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        field = self.field
-        if field and field.startswith('field('):
-            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
-        return super().imports
-
-    def self_reference(self) -> bool:  # pragma: no cover
-        return isinstance(self.parent, DataClass) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if result == '':
-            return None
-
-        return result
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
-
-        if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
-
-        if self.required:
-            data = {
-                k: v
-                for k, v in data.items()
-                if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
-            }
-
-        if not data:
-            return ''
-
-        if len(data) == 1 and 'default' in data:
-            default = data['default']
-
-            if isinstance(default, (list, dict)):
-                return f'field(default_factory=lambda :{repr(default)})'
-            return repr(default)
-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        datetime_map = (
-            {
-                Types.time: self.data_type.from_import(IMPORT_TIME),
-                Types.date: self.data_type.from_import(IMPORT_DATE),
-                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
-                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
-            }
-            if target_datetime_class is DatetimeClassType.Datetime
-            else {}
-        )
-
-        self.type_map: Dict[Types, DataType] = {
-            **type_map_factory(self.data_type),
-            **datetime_map,
-        }
diff -pruN 0.26.4-3/datamodel_code_generator/model/enum.py 0.34.0-1/datamodel_code_generator/model/enum.py
--- 0.26.4-3/datamodel_code_generator/model/enum.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/enum.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,102 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_ANY, IMPORT_ENUM, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED, BaseClassDataType
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import DataType, Types
-
-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BYTES: str = 'bytes'
-_STR: str = 'str'
-
-SUBCLASS_BASE_CLASSES: Dict[Types, str] = {
-    Types.int32: _INT,
-    Types.int64: _INT,
-    Types.integer: _INT,
-    Types.float: _FLOAT,
-    Types.double: _FLOAT,
-    Types.number: _FLOAT,
-    Types.byte: _BYTES,
-    Types.string: _STR,
-}
-
-
-class Enum(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Enum.jinja2'
-    BASE_CLASS: ClassVar[str] = 'enum.Enum'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_ENUM,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        type_: Optional[Types] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-        if not base_classes and type_:
-            base_class = SUBCLASS_BASE_CLASSES.get(type_)
-            if base_class:
-                self.base_classes: List[BaseClassDataType] = [
-                    BaseClassDataType(type=base_class),
-                    *self.base_classes,
-                ]
-
-    @classmethod
-    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
-        raise NotImplementedError
-
-    def get_member(self, field: DataModelFieldBase) -> Member:
-        return Member(self, field)
-
-    def find_member(self, value: Any) -> Optional[Member]:
-        repr_value = repr(value)
-        for field in self.fields:  # pragma: no cover
-            if field.default == repr_value:
-                return self.get_member(field)
-        return None  # pragma: no cover
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return tuple(i for i in super().imports if i != IMPORT_ANY)
-
-
-class Member:
-    def __init__(self, enum: Enum, field: DataModelFieldBase) -> None:
-        self.enum: Enum = enum
-        self.field: DataModelFieldBase = field
-        self.alias: Optional[str] = None
-
-    def __repr__(self) -> str:
-        return f'{self.alias or self.enum.name}.{self.field.name}'
diff -pruN 0.26.4-3/datamodel_code_generator/model/imports.py 0.34.0-1/datamodel_code_generator/model/imports.py
--- 0.26.4-3/datamodel_code_generator/model/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
-IMPORT_FIELD = Import.from_full_path('dataclasses.field')
-IMPORT_CLASSVAR = Import.from_full_path('typing.ClassVar')
-IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
-IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
-IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
-IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path('typing_extensions.NotRequired')
-IMPORT_MSGSPEC_STRUCT = Import.from_full_path('msgspec.Struct')
-IMPORT_MSGSPEC_FIELD = Import.from_full_path('msgspec.field')
-IMPORT_MSGSPEC_META = Import.from_full_path('msgspec.Meta')
-IMPORT_MSGSPEC_CONVERT = Import.from_full_path('msgspec.convert')
diff -pruN 0.26.4-3/datamodel_code_generator/model/msgspec.py 0.34.0-1/datamodel_code_generator/model/msgspec.py
--- 0.26.4-3/datamodel_code_generator/model/msgspec.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/msgspec.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,338 +0,0 @@
-from functools import wraps
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-)
-
-from pydantic import Field
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    Import,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import (
-    IMPORT_CLASSVAR,
-    IMPORT_MSGSPEC_CONVERT,
-    IMPORT_MSGSPEC_FIELD,
-    IMPORT_MSGSPEC_META,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    Constraints as _Constraints,
-)
-from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
-from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import (
-    DataType,
-    StrictTypes,
-    Types,
-    chain_as_tuple,
-    get_optional_type,
-)
-
-
-def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
-    )
-
-
-DataModelFieldBaseT = TypeVar('DataModelFieldBaseT', bound=DataModelFieldBase)
-
-
-def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]:
-    original_imports: property = getattr(cls, 'imports', None)  # type: ignore
-
-    @wraps(original_imports.fget)  # type: ignore
-    def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
-        extra_imports = []
-        field = self.field
-        # TODO: Improve field detection
-        if field and field.startswith('field('):
-            extra_imports.append(IMPORT_MSGSPEC_FIELD)
-        if self.field and 'lambda: convert' in self.field:
-            extra_imports.append(IMPORT_MSGSPEC_CONVERT)
-        if self.annotated:
-            extra_imports.append(IMPORT_MSGSPEC_META)
-        if self.extras.get('is_classvar'):
-            extra_imports.append(IMPORT_CLASSVAR)
-        return chain_as_tuple(original_imports.fget(self), extra_imports)  # type: ignore
-
-    setattr(cls, 'imports', property(new_imports))
-    return cls
-
-
-class RootModel(_RootModel):
-    pass
-
-
-class Struct(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
-    BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=sorted(fields, key=_has_field_assignment, reverse=False),
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        self.extra_template_data.setdefault('base_class_kwargs', {})
-        if self.keyword_only:
-            self.add_base_class_kwarg('kw_only', 'True')
-
-    def add_base_class_kwarg(self, name: str, value):
-        self.extra_template_data['base_class_kwargs'][name] = value
-
-
-class Constraints(_Constraints):
-    # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
-
-
-@import_extender
-class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
-    }
-    _META_FIELD_KEYS: ClassVar[Set[str]] = {
-        'title',
-        'description',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        # 'min_items', # not supported by msgspec
-        # 'max_items', # not supported by msgspec
-        'min_length',
-        'max_length',
-        'pattern',
-        'examples',
-        # 'unique_items', # not supported by msgspec
-    }
-    _PARSE_METHOD = 'convert'
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le', 'multiple_of'}
-    constraints: Optional[Constraints] = None
-
-    def self_reference(self) -> bool:  # pragma: no cover
-        return isinstance(self.parent, Struct) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.const = True
-        self.nullable = False
-        const = self.extras['const']
-        if self.data_type.type == 'str' and isinstance(
-            const, str
-        ):  # pragma: no cover # Literal supports only str
-            self.data_type = self.data_type.__class__(literals=[const])
-
-    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
-        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
-            return value
-
-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
-            return float(value)
-        return int(value)
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if result == '':
-            return None
-
-        return result
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
-        if self.alias:
-            data['name'] = self.alias
-
-        if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
-        elif not self.required:
-            data['default'] = None
-
-        if self.required:
-            data = {
-                k: v
-                for k, v in data.items()
-                if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
-            }
-        elif self.default and 'default_factory' not in data:
-            default_factory = self._get_default_as_struct_model()
-            if default_factory is not None:
-                data.pop('default')
-                data['default_factory'] = default_factory
-
-        if not data:
-            return ''
-
-        if len(data) == 1 and 'default' in data:
-            return repr(data['default'])
-
-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
-
-    @property
-    def annotated(self) -> Optional[str]:
-        if not self.use_annotated:  # pragma: no cover
-            return None
-
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS
-        }
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
-            data = {
-                **data,
-                **{
-                    k: self._get_strict_field_constraint_value(k, v)
-                    for k, v in self.constraints.dict().items()
-                    if k in self._META_FIELD_KEYS
-                },
-            }
-
-        meta_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
-        if not meta_arguments:
-            return None
-
-        meta = f'Meta({", ".join(meta_arguments)})'
-
-        if not self.required and not self.extras.get('is_classvar'):
-            type_hint = self.data_type.type_hint
-            annotated_type = f'Annotated[{type_hint}, {meta}]'
-            return get_optional_type(annotated_type, self.data_type.use_union_operator)
-
-        annotated_type = f'Annotated[{self.type_hint}, {meta}]'
-        if self.extras.get('is_classvar'):
-            annotated_type = f'ClassVar[{annotated_type}]'
-
-        return annotated_type
-
-    def _get_default_as_struct_model(self) -> Optional[str]:
-        for data_type in self.data_type.data_types or (self.data_type,):
-            # TODO: Check nested data_types
-            if data_type.is_dict or self.data_type.is_union:
-                # TODO: Parse Union and dict model for default
-                continue  # pragma: no cover
-            elif data_type.is_list and len(data_type.data_types) == 1:
-                data_type = data_type.data_types[0]
-                if (  # pragma: no cover
-                    data_type.reference
-                    and (
-                        isinstance(data_type.reference.source, Struct)
-                        or isinstance(data_type.reference.source, RootModel)
-                    )
-                    and isinstance(self.default, list)
-                ):
-                    return f'lambda: {self._PARSE_METHOD}({repr(self.default)},  type=list[{data_type.alias or data_type.reference.source.class_name}])'
-            elif data_type.reference and isinstance(data_type.reference.source, Struct):
-                return f'lambda: {self._PARSE_METHOD}({repr(self.default)},  type={data_type.alias or data_type.reference.source.class_name})'
-        return None
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        datetime_map = (
-            {
-                Types.time: self.data_type.from_import(IMPORT_TIME),
-                Types.date: self.data_type.from_import(IMPORT_DATE),
-                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
-                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
-            }
-            if target_datetime_class is DatetimeClassType.Datetime
-            else {}
-        )
-
-        self.type_map: Dict[Types, DataType] = {
-            **type_map_factory(self.data_type),
-            **datetime_map,
-        }
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/__init__.py 0.34.0-1/datamodel_code_generator/model/pydantic/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,47 +0,0 @@
-from __future__ import annotations
-
-from typing import Iterable, Optional
-
-from pydantic import BaseModel as _BaseModel
-
-from .base_model import BaseModel, DataModelField
-from .custom_root_type import CustomRootType
-from .dataclass import DataClass
-from .types import DataTypeManager
-
-
-def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
-    return '\n'.join(
-        f'{class_name}.update_forward_refs()' for class_name in class_names
-    )
-
-
-class Config(_BaseModel):
-    extra: Optional[str] = None
-    title: Optional[str] = None
-    allow_population_by_field_name: Optional[bool] = None
-    allow_extra_fields: Optional[bool] = None
-    allow_mutation: Optional[bool] = None
-    arbitrary_types_allowed: Optional[bool] = None
-    orm_mode: Optional[bool] = None
-
-
-# def get_validator_template() -> Template:
-#     template_file_path: Path = Path('pydantic') / 'one_of_validator.jinja2'
-#     loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-#     environment: Environment = Environment(loader=loader, autoescape=True)
-#     return environment.get_template(template_file_path.name)
-#
-#
-# VALIDATOR_TEMPLATE: Template = get_validator_template()
-
-
-__all__ = [
-    'BaseModel',
-    'DataModelField',
-    'CustomRootType',
-    'DataClass',
-    'dump_resolve_reference_action',
-    'DataTypeManager',
-    # 'VALIDATOR_TEMPLATE',
-]
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/base_model.py 0.34.0-1/datamodel_code_generator/model/pydantic/base_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/base_model.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/base_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,325 +0,0 @@
-from abc import ABC
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Set, Tuple
-
-from pydantic import Field
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import (
-    ConstraintsBase,
-    DataModel,
-    DataModelFieldBase,
-)
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.pydantic.imports import (
-    IMPORT_ANYURL,
-    IMPORT_EXTRA,
-    IMPORT_FIELD,
-)
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import UnionIntFloat, chain_as_tuple
-from datamodel_code_generator.util import cached_property
-
-
-class Constraints(ConstraintsBase):
-    gt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMinimum')
-    ge: Optional[UnionIntFloat] = Field(None, alias='minimum')
-    lt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMaximum')
-    le: Optional[UnionIntFloat] = Field(None, alias='maximum')
-    multiple_of: Optional[float] = Field(None, alias='multipleOf')
-    min_items: Optional[int] = Field(None, alias='minItems')
-    max_items: Optional[int] = Field(None, alias='maxItems')
-    min_length: Optional[int] = Field(None, alias='minLength')
-    max_length: Optional[int] = Field(None, alias='maxLength')
-    regex: Optional[str] = Field(None, alias='pattern')
-
-
-class DataModelField(DataModelFieldBase):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'const',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_items',
-        'max_items',
-        'min_length',
-        'max_length',
-        'regex',
-    }
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le'}
-    constraints: Optional[Constraints] = None
-    _PARSE_METHOD: ClassVar[str] = 'parse_obj'
-
-    @property
-    def method(self) -> Optional[str]:
-        return self.validator
-
-    @property
-    def validator(self) -> Optional[str]:
-        return None
-        # TODO refactor this method for other validation logic
-        # from datamodel_code_generator.model.pydantic import VALIDATOR_TEMPLATE
-        #
-        # return VALIDATOR_TEMPLATE.render(
-        #     field_name=self.name, types=','.join([t.type_hint for t in self.data_types])
-        # )
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if (
-            self.use_default_kwarg
-            and not result.startswith('Field(...')
-            and not result.startswith('Field(default_factory=')
-        ):
-            # Use `default=` for fields that have a default value so that type
-            # checkers using @dataclass_transform can infer the field as
-            # optional in __init__.
-            result = result.replace('Field(', 'Field(default=')
-        if result == '':
-            return None
-
-        return result
-
-    def self_reference(self) -> bool:
-        return isinstance(
-            self.parent, BaseModelBase
-        ) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
-        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
-            return value
-
-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
-            return float(value)
-        return int(value)
-
-    def _get_default_as_pydantic_model(self) -> Optional[str]:
-        for data_type in self.data_type.data_types or (self.data_type,):
-            # TODO: Check nested data_types
-            if data_type.is_dict or self.data_type.is_union:
-                # TODO: Parse Union and dict model for default
-                continue
-            elif data_type.is_list and len(data_type.data_types) == 1:
-                data_type = data_type.data_types[0]
-                if (
-                    data_type.reference
-                    and isinstance(data_type.reference.source, BaseModelBase)
-                    and isinstance(self.default, list)
-                ):  # pragma: no cover
-                    return f'lambda :[{data_type.alias or data_type.reference.source.class_name}.{self._PARSE_METHOD}(v) for v in {repr(self.default)}]'
-            elif data_type.reference and isinstance(
-                data_type.reference.source, BaseModelBase
-            ):  # pragma: no cover
-                return f'lambda :{data_type.alias or data_type.reference.source.class_name}.{self._PARSE_METHOD}({repr(self.default)})'
-        return None
-
-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
-        if self.const:
-            data['const'] = True
-
-    def _process_annotated_field_arguments(
-        self, field_arguments: List[str]
-    ) -> List[str]:
-        return field_arguments
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS
-        }
-        if self.alias:
-            data['alias'] = self.alias
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
-            data = {
-                **data,
-                **(
-                    {}
-                    if any(
-                        d.import_ == IMPORT_ANYURL
-                        for d in self.data_type.all_data_types
-                    )
-                    else {
-                        k: self._get_strict_field_constraint_value(k, v)
-                        for k, v in self.constraints.dict(exclude_unset=True).items()
-                    }
-                ),
-            }
-
-        if self.use_field_description:
-            data.pop('description', None)  # Description is part of field docstring
-
-        self._process_data_in_str(data)
-
-        discriminator = data.pop('discriminator', None)
-        if discriminator:
-            if isinstance(discriminator, str):
-                data['discriminator'] = discriminator
-            elif isinstance(discriminator, dict):  # pragma: no cover
-                data['discriminator'] = discriminator['propertyName']
-
-        if self.required:
-            default_factory = None
-        elif self.default and 'default_factory' not in data:
-            default_factory = self._get_default_as_pydantic_model()
-        else:
-            default_factory = data.pop('default_factory', None)
-
-        field_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
-
-        if not field_arguments and not default_factory:
-            if self.nullable and self.required:
-                return 'Field(...)'  # Field() is for mypy
-            return ''
-
-        if self.use_annotated:
-            field_arguments = self._process_annotated_field_arguments(field_arguments)
-        elif self.required:
-            field_arguments = ['...', *field_arguments]
-        elif default_factory:
-            field_arguments = [f'default_factory={default_factory}', *field_arguments]
-        else:
-            field_arguments = [f'{repr(self.default)}', *field_arguments]
-
-        return f'Field({", ".join(field_arguments)})'
-
-    @property
-    def annotated(self) -> Optional[str]:
-        if not self.use_annotated or not str(self):
-            return None
-        return f'Annotated[{self.type_hint}, {str(self)}]'
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        if self.field:
-            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
-        return super().imports
-
-
-class BaseModelBase(DataModel, ABC):
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        methods: List[str] = [field.method for field in fields if field.method]
-
-        super().__init__(
-            fields=fields,
-            reference=reference,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-    @cached_property
-    def template_file_path(self) -> Path:
-        # This property is for Backward compatibility
-        # Current version supports '{custom_template_dir}/BaseModel.jinja'
-        # But, Future version will support only '{custom_template_dir}/pydantic/BaseModel.jinja'
-        if self._custom_template_dir is not None:
-            custom_template_file_path = (
-                self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
-            )
-            if custom_template_file_path.exists():
-                return custom_template_file_path
-        return super().template_file_path
-
-
-class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        config_parameters: Dict[str, Any] = {}
-
-        additionalProperties = self.extra_template_data.get('additionalProperties')
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if additionalProperties is not None or allow_extra_fields:
-            config_parameters['extra'] = (
-                'Extra.allow'
-                if additionalProperties or allow_extra_fields
-                else 'Extra.forbid'
-            )
-            self._additional_imports.append(IMPORT_EXTRA)
-
-        for config_attribute in 'allow_population_by_field_name', 'allow_mutation':
-            if config_attribute in self.extra_template_data:
-                config_parameters[config_attribute] = self.extra_template_data[
-                    config_attribute
-                ]
-        for data_type in self.all_data_types:
-            if data_type.is_custom_type:
-                config_parameters['arbitrary_types_allowed'] = True
-                break
-
-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
-
-        if config_parameters:
-            from datamodel_code_generator.model.pydantic import Config
-
-            self.extra_template_data['config'] = Config.parse_obj(config_parameters)
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/custom_root_type.py 0.34.0-1/datamodel_code_generator/model/pydantic/custom_root_type.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/custom_root_type.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar
-
-from datamodel_code_generator.model.pydantic.base_model import BaseModel
-
-
-class CustomRootType(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel_root.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/dataclass.py 0.34.0-1/datamodel_code_generator/model/pydantic/dataclass.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/dataclass.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/dataclass.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar, Tuple
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import DataModel
-from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS
-
-
-class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/imports.py 0.34.0-1/datamodel_code_generator/model/pydantic/imports.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,35 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_CONSTR = Import.from_full_path('pydantic.constr')
-IMPORT_CONINT = Import.from_full_path('pydantic.conint')
-IMPORT_CONFLOAT = Import.from_full_path('pydantic.confloat')
-IMPORT_CONDECIMAL = Import.from_full_path('pydantic.condecimal')
-IMPORT_CONBYTES = Import.from_full_path('pydantic.conbytes')
-IMPORT_POSITIVE_INT = Import.from_full_path('pydantic.PositiveInt')
-IMPORT_NEGATIVE_INT = Import.from_full_path('pydantic.NegativeInt')
-IMPORT_NON_POSITIVE_INT = Import.from_full_path('pydantic.NonPositiveInt')
-IMPORT_NON_NEGATIVE_INT = Import.from_full_path('pydantic.NonNegativeInt')
-IMPORT_POSITIVE_FLOAT = Import.from_full_path('pydantic.PositiveFloat')
-IMPORT_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NegativeFloat')
-IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NonNegativeFloat')
-IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path('pydantic.NonPositiveFloat')
-IMPORT_SECRET_STR = Import.from_full_path('pydantic.SecretStr')
-IMPORT_EMAIL_STR = Import.from_full_path('pydantic.EmailStr')
-IMPORT_UUID1 = Import.from_full_path('pydantic.UUID1')
-IMPORT_UUID2 = Import.from_full_path('pydantic.UUID2')
-IMPORT_UUID3 = Import.from_full_path('pydantic.UUID3')
-IMPORT_UUID4 = Import.from_full_path('pydantic.UUID4')
-IMPORT_UUID5 = Import.from_full_path('pydantic.UUID5')
-IMPORT_ANYURL = Import.from_full_path('pydantic.AnyUrl')
-IMPORT_IPV4ADDRESS = Import.from_full_path('ipaddress.IPv4Address')
-IMPORT_IPV6ADDRESS = Import.from_full_path('ipaddress.IPv6Address')
-IMPORT_IPV4NETWORKS = Import.from_full_path('ipaddress.IPv4Network')
-IMPORT_IPV6NETWORKS = Import.from_full_path('ipaddress.IPv6Network')
-IMPORT_EXTRA = Import.from_full_path('pydantic.Extra')
-IMPORT_FIELD = Import.from_full_path('pydantic.Field')
-IMPORT_STRICT_INT = Import.from_full_path('pydantic.StrictInt')
-IMPORT_STRICT_FLOAT = Import.from_full_path('pydantic.StrictFloat')
-IMPORT_STRICT_STR = Import.from_full_path('pydantic.StrictStr')
-IMPORT_STRICT_BOOL = Import.from_full_path('pydantic.StrictBool')
-IMPORT_STRICT_BYTES = Import.from_full_path('pydantic.StrictBytes')
-IMPORT_DATACLASS = Import.from_full_path('pydantic.dataclasses.dataclass')
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/types.py 0.34.0-1/datamodel_code_generator/model/pydantic/types.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/types.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,353 +0,0 @@
-from __future__ import annotations
-
-from decimal import Decimal
-from typing import Any, ClassVar, Dict, Optional, Sequence, Set, Type
-
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ANY,
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_DECIMAL,
-    IMPORT_PATH,
-    IMPORT_PENDULUM_DATE,
-    IMPORT_PENDULUM_DATETIME,
-    IMPORT_PENDULUM_DURATION,
-    IMPORT_PENDULUM_TIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    IMPORT_UUID,
-)
-from datamodel_code_generator.model.pydantic.imports import (
-    IMPORT_ANYURL,
-    IMPORT_CONBYTES,
-    IMPORT_CONDECIMAL,
-    IMPORT_CONFLOAT,
-    IMPORT_CONINT,
-    IMPORT_CONSTR,
-    IMPORT_EMAIL_STR,
-    IMPORT_IPV4ADDRESS,
-    IMPORT_IPV4NETWORKS,
-    IMPORT_IPV6ADDRESS,
-    IMPORT_IPV6NETWORKS,
-    IMPORT_NEGATIVE_FLOAT,
-    IMPORT_NEGATIVE_INT,
-    IMPORT_NON_NEGATIVE_FLOAT,
-    IMPORT_NON_NEGATIVE_INT,
-    IMPORT_NON_POSITIVE_FLOAT,
-    IMPORT_NON_POSITIVE_INT,
-    IMPORT_POSITIVE_FLOAT,
-    IMPORT_POSITIVE_INT,
-    IMPORT_SECRET_STR,
-    IMPORT_STRICT_BOOL,
-    IMPORT_STRICT_BYTES,
-    IMPORT_STRICT_FLOAT,
-    IMPORT_STRICT_INT,
-    IMPORT_STRICT_STR,
-    IMPORT_UUID1,
-    IMPORT_UUID2,
-    IMPORT_UUID3,
-    IMPORT_UUID4,
-    IMPORT_UUID5,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
-from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
-
-
-def type_map_factory(
-    data_type: Type[DataType],
-    strict_types: Sequence[StrictTypes],
-    pattern_key: str,
-    use_pendulum: bool,
-    target_datetime_class: DatetimeClassType,
-) -> Dict[Types, DataType]:
-    data_type_int = data_type(type='int')
-    data_type_float = data_type(type='float')
-    data_type_str = data_type(type='str')
-    result = {
-        Types.integer: data_type_int,
-        Types.int32: data_type_int,
-        Types.int64: data_type_int,
-        Types.number: data_type_float,
-        Types.float: data_type_float,
-        Types.double: data_type_float,
-        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
-        Types.time: data_type.from_import(IMPORT_TIME),
-        Types.string: data_type_str,
-        Types.byte: data_type_str,  # base64 encoded string
-        Types.binary: data_type(type='bytes'),
-        Types.date: data_type.from_import(IMPORT_DATE),
-        Types.date_time: data_type.from_import(IMPORT_DATETIME),
-        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
-        Types.path: data_type.from_import(IMPORT_PATH),
-        Types.password: data_type.from_import(IMPORT_SECRET_STR),
-        Types.email: data_type.from_import(IMPORT_EMAIL_STR),
-        Types.uuid: data_type.from_import(IMPORT_UUID),
-        Types.uuid1: data_type.from_import(IMPORT_UUID1),
-        Types.uuid2: data_type.from_import(IMPORT_UUID2),
-        Types.uuid3: data_type.from_import(IMPORT_UUID3),
-        Types.uuid4: data_type.from_import(IMPORT_UUID4),
-        Types.uuid5: data_type.from_import(IMPORT_UUID5),
-        Types.uri: data_type.from_import(IMPORT_ANYURL),
-        Types.hostname: data_type.from_import(
-            IMPORT_CONSTR,
-            strict=StrictTypes.str in strict_types,
-            # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
-            kwargs={
-                pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
-                **({'strict': True} if StrictTypes.str in strict_types else {}),
-            },
-        ),
-        Types.ipv4: data_type.from_import(IMPORT_IPV4ADDRESS),
-        Types.ipv6: data_type.from_import(IMPORT_IPV6ADDRESS),
-        Types.ipv4_network: data_type.from_import(IMPORT_IPV4NETWORKS),
-        Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
-        Types.boolean: data_type(type='bool'),
-        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
-        Types.null: data_type(type='None'),
-        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
-        Types.any: data_type.from_import(IMPORT_ANY),
-    }
-    if use_pendulum:
-        result[Types.date] = data_type.from_import(IMPORT_PENDULUM_DATE)
-        result[Types.date_time] = data_type.from_import(IMPORT_PENDULUM_DATETIME)
-        result[Types.time] = data_type.from_import(IMPORT_PENDULUM_TIME)
-        result[Types.timedelta] = data_type.from_import(IMPORT_PENDULUM_DURATION)
-
-    return result
-
-
-def strict_type_map_factory(data_type: Type[DataType]) -> Dict[StrictTypes, DataType]:
-    return {
-        StrictTypes.int: data_type.from_import(IMPORT_STRICT_INT, strict=True),
-        StrictTypes.float: data_type.from_import(IMPORT_STRICT_FLOAT, strict=True),
-        StrictTypes.bytes: data_type.from_import(IMPORT_STRICT_BYTES, strict=True),
-        StrictTypes.bool: data_type.from_import(IMPORT_STRICT_BOOL, strict=True),
-        StrictTypes.str: data_type.from_import(IMPORT_STRICT_STR, strict=True),
-    }
-
-
-number_kwargs: Set[str] = {
-    'exclusiveMinimum',
-    'minimum',
-    'exclusiveMaximum',
-    'maximum',
-    'multipleOf',
-}
-
-string_kwargs: Set[str] = {'minItems', 'maxItems', 'minLength', 'maxLength', 'pattern'}
-
-byes_kwargs: Set[str] = {'minLength', 'maxLength'}
-
-escape_characters = str.maketrans(
-    {
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] = 'regex'
-
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        self.type_map: Dict[Types, DataType] = self.type_map_factory(
-            self.data_type,
-            strict_types=self.strict_types,
-            pattern_key=self.PATTERN_KEY,
-            target_datetime_class=target_datetime_class,
-        )
-        self.strict_type_map: Dict[StrictTypes, DataType] = strict_type_map_factory(
-            self.data_type,
-        )
-
-        self.kwargs_schema_to_model: Dict[str, str] = {
-            'exclusiveMinimum': 'gt',
-            'minimum': 'ge',
-            'exclusiveMaximum': 'lt',
-            'maximum': 'le',
-            'multipleOf': 'multiple_of',
-            'minItems': 'min_items',
-            'maxItems': 'max_items',
-            'minLength': 'min_length',
-            'maxLength': 'max_length',
-            'pattern': self.PATTERN_KEY,
-        }
-
-    def type_map_factory(
-        self,
-        data_type: Type[DataType],
-        strict_types: Sequence[StrictTypes],
-        pattern_key: str,
-        target_datetime_class: DatetimeClassType,
-    ) -> Dict[Types, DataType]:
-        return type_map_factory(
-            data_type,
-            strict_types,
-            pattern_key,
-            self.use_pendulum,
-            self.target_datetime_class,
-        )
-
-    def transform_kwargs(
-        self, kwargs: Dict[str, Any], filter_: Set[str]
-    ) -> Dict[str, str]:
-        return {
-            self.kwargs_schema_to_model.get(k, k): v
-            for (k, v) in kwargs.items()
-            if v is not None and k in filter_
-        }
-
-    def get_data_int_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, number_kwargs)
-        strict = StrictTypes.int in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                if data_type_kwargs == {'gt': 0}:
-                    return self.data_type.from_import(IMPORT_POSITIVE_INT)
-                if data_type_kwargs == {'lt': 0}:
-                    return self.data_type.from_import(IMPORT_NEGATIVE_INT)
-                if (
-                    data_type_kwargs == {'ge': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_INT)
-                if (
-                    data_type_kwargs == {'le': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_POSITIVE_INT)
-            kwargs = {k: int(v) for k, v in data_type_kwargs.items()}
-            if strict:
-                kwargs['strict'] = True
-            return self.data_type.from_import(IMPORT_CONINT, kwargs=kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.int]
-        return self.type_map[types]
-
-    def get_data_float_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
-        strict = StrictTypes.float in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                if data_type_kwargs == {'gt': 0}:
-                    return self.data_type.from_import(IMPORT_POSITIVE_FLOAT)
-                if data_type_kwargs == {'lt': 0}:
-                    return self.data_type.from_import(IMPORT_NEGATIVE_FLOAT)
-                if (
-                    data_type_kwargs == {'ge': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_FLOAT)
-                if (
-                    data_type_kwargs == {'le': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_POSITIVE_FLOAT)
-            kwargs = {k: float(v) for k, v in data_type_kwargs.items()}
-            if strict:
-                kwargs['strict'] = True
-            return self.data_type.from_import(IMPORT_CONFLOAT, kwargs=kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.float]
-        return self.type_map[types]
-
-    def get_data_decimal_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
-        if data_type_kwargs:
-            return self.data_type.from_import(
-                IMPORT_CONDECIMAL,
-                kwargs={
-                    k: Decimal(str(v) if isinstance(v, UnionIntFloat) else v)
-                    for k, v in data_type_kwargs.items()
-                },
-            )
-        return self.type_map[types]
-
-    def get_data_str_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, string_kwargs)
-        strict = StrictTypes.str in self.strict_types
-        if data_type_kwargs:
-            if strict:
-                data_type_kwargs['strict'] = True
-            if self.PATTERN_KEY in data_type_kwargs:
-                escaped_regex = data_type_kwargs[self.PATTERN_KEY].translate(
-                    escape_characters
-                )
-                # TODO: remove unneeded escaped characters
-                data_type_kwargs[self.PATTERN_KEY] = f"r'{escaped_regex}'"
-            return self.data_type.from_import(IMPORT_CONSTR, kwargs=data_type_kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.str]
-        return self.type_map[types]
-
-    def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, byes_kwargs)
-        strict = StrictTypes.bytes in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                return self.data_type.from_import(
-                    IMPORT_CONBYTES, kwargs=data_type_kwargs
-                )
-        # conbytes doesn't accept strict argument
-        # https://github.com/samuelcolvin/pydantic/issues/2489
-        #    if strict:
-        #         data_type_kwargs['strict'] = True
-        #     return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.bytes]
-        return self.type_map[types]
-
-    def get_data_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        if types == Types.string:
-            return self.get_data_str_type(types, **kwargs)
-        elif types in (Types.int32, Types.int64, Types.integer):
-            return self.get_data_int_type(types, **kwargs)
-        elif types in (Types.float, Types.double, Types.number, Types.time):
-            return self.get_data_float_type(types, **kwargs)
-        elif types == Types.decimal:
-            return self.get_data_decimal_type(types, **kwargs)
-        elif types == Types.binary:
-            return self.get_data_bytes_type(types, **kwargs)
-        elif types == Types.boolean:
-            if StrictTypes.bool in self.strict_types:
-                return self.strict_type_map[StrictTypes.bool]
-
-        return self.type_map[types]
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/__init__.py 0.34.0-1/datamodel_code_generator/model/pydantic_v2/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic_v2/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,36 +0,0 @@
-from __future__ import annotations
-
-from typing import Iterable, Optional, Tuple
-
-from pydantic import BaseModel as _BaseModel
-
-from .base_model import BaseModel, DataModelField, UnionMode
-from .root_model import RootModel
-from .types import DataTypeManager
-
-
-def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
-    return '\n'.join(f'{class_name}.model_rebuild()' for class_name in class_names)
-
-
-class ConfigDict(_BaseModel):
-    extra: Optional[str] = None
-    title: Optional[str] = None
-    populate_by_name: Optional[bool] = None
-    allow_extra_fields: Optional[bool] = None
-    from_attributes: Optional[bool] = None
-    frozen: Optional[bool] = None
-    arbitrary_types_allowed: Optional[bool] = None
-    protected_namespaces: Optional[Tuple[str, ...]] = None
-    regex_engine: Optional[str] = None
-    use_enum_values: Optional[bool] = None
-
-
-__all__ = [
-    'BaseModel',
-    'DataModelField',
-    'RootModel',
-    'dump_resolve_reference_action',
-    'DataTypeManager',
-    'UnionMode',
-]
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/base_model.py 0.34.0-1/datamodel_code_generator/model/pydantic_v2/base_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/base_model.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic_v2/base_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,247 +0,0 @@
-import re
-from enum import Enum
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    NamedTuple,
-    Optional,
-    Set,
-)
-
-from pydantic import Field
-from typing_extensions import Literal
-
-from datamodel_code_generator.model.base import UNDEFINED, DataModelFieldBase
-from datamodel_code_generator.model.pydantic.base_model import (
-    BaseModelBase,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    Constraints as _Constraints,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    DataModelField as DataModelFieldV1,
-)
-from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.util import field_validator, model_validator
-
-
-class UnionMode(Enum):
-    smart = 'smart'
-    left_to_right = 'left_to_right'
-
-
-class Constraints(_Constraints):
-    # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
-
-    @model_validator(mode='before')
-    def validate_min_max_items(cls, values: Any) -> Dict[str, Any]:
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        min_items = values.pop('minItems', None)
-        if min_items is not None:
-            values['minLength'] = min_items
-        max_items = values.pop('maxItems', None)
-        if max_items is not None:
-            values['maxLength'] = max_items
-        return values
-
-
-class DataModelField(DataModelFieldV1):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_length',
-        'max_length',
-        'pattern',
-    }
-    _DEFAULT_FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
-        'alias',
-        'alias_priority',
-        'validation_alias',
-        'serialization_alias',
-        'title',
-        'description',
-        'examples',
-        'exclude',
-        'discriminator',
-        'json_schema_extra',
-        'frozen',
-        'validate_default',
-        'repr',
-        'init_var',
-        'kw_only',
-        'pattern',
-        'strict',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'allow_inf_nan',
-        'max_digits',
-        'decimal_places',
-        'min_length',
-        'max_length',
-        'union_mode',
-    }
-    constraints: Optional[Constraints] = None
-    _PARSE_METHOD: ClassVar[str] = 'model_validate'
-    can_have_extra_keys: ClassVar[bool] = False
-
-    @field_validator('extras')
-    def validate_extras(cls, values: Any) -> Dict[str, Any]:
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        if 'examples' in values:
-            return values
-
-        if 'example' in values:
-            values['examples'] = [values.pop('example')]
-        return values
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.const = True
-        self.nullable = False
-        const = self.extras['const']
-        self.data_type = self.data_type.__class__(literals=[const])
-        if not self.default:
-            self.default = const
-
-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
-        if self.const:
-            # const is removed in pydantic 2.0
-            data.pop('const')
-
-        # unique_items is not supported in pydantic 2.0
-        data.pop('unique_items', None)
-
-        if 'union_mode' in data:
-            if self.data_type.is_union:
-                data['union_mode'] = data.pop('union_mode').value
-            else:
-                data.pop('union_mode')
-
-        # **extra is not supported in pydantic 2.0
-        json_schema_extra = {
-            k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS
-        }
-        if json_schema_extra:
-            data['json_schema_extra'] = json_schema_extra
-            for key in json_schema_extra.keys():
-                data.pop(key)
-
-    def _process_annotated_field_arguments(
-        self,
-        field_arguments: List[str],
-    ) -> List[str]:
-        return field_arguments
-
-
-class ConfigAttribute(NamedTuple):
-    from_: str
-    to: str
-    invert: bool
-
-
-class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
-    CONFIG_ATTRIBUTES: ClassVar[List[ConfigAttribute]] = [
-        ConfigAttribute('allow_population_by_field_name', 'populate_by_name', False),
-        ConfigAttribute('populate_by_name', 'populate_by_name', False),
-        ConfigAttribute('allow_mutation', 'frozen', True),
-        ConfigAttribute('frozen', 'frozen', False),
-    ]
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        config_parameters: Dict[str, Any] = {}
-
-        extra = self._get_config_extra()
-        if extra:
-            config_parameters['extra'] = extra
-
-        for from_, to, invert in self.CONFIG_ATTRIBUTES:
-            if from_ in self.extra_template_data:
-                config_parameters[to] = (
-                    not self.extra_template_data[from_]
-                    if invert
-                    else self.extra_template_data[from_]
-                )
-        for data_type in self.all_data_types:
-            if data_type.is_custom_type:  # pragma: no cover
-                config_parameters['arbitrary_types_allowed'] = True
-                break
-
-        for field in self.fields:
-            # Check if a regex pattern uses lookarounds.
-            # Depending on the generation configuration, the pattern may end up in two different places.
-            pattern = (
-                isinstance(field.constraints, Constraints) and field.constraints.pattern
-            ) or (field.data_type.kwargs or {}).get('pattern')
-            if pattern and re.search(r'\(\?<?[=!]', pattern):
-                config_parameters['regex_engine'] = '"python-re"'
-                break
-
-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
-
-        if config_parameters:
-            from datamodel_code_generator.model.pydantic_v2 import ConfigDict
-
-            self.extra_template_data['config'] = ConfigDict.parse_obj(config_parameters)
-            self._additional_imports.append(IMPORT_CONFIG_DICT)
-
-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
-        additionalProperties = self.extra_template_data.get('additionalProperties')
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if additionalProperties is not None or allow_extra_fields:
-            return (
-                "'allow'" if additionalProperties or allow_extra_fields else "'forbid'"
-            )
-        return None
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/imports.py 0.34.0-1/datamodel_code_generator/model/pydantic_v2/imports.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/imports.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic_v2/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_CONFIG_DICT = Import.from_full_path('pydantic.ConfigDict')
-IMPORT_AWARE_DATETIME = Import.from_full_path('pydantic.AwareDatetime')
-IMPORT_NAIVE_DATETIME = Import.from_full_path('pydantic.NaiveDatetime')
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/root_model.py 0.34.0-1/datamodel_code_generator/model/pydantic_v2/root_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/root_model.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic_v2/root_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,25 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, ClassVar, Literal, Optional
-
-from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
-
-
-class RootModel(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'
-
-    def __init__(
-        self,
-        **kwargs: Any,
-    ) -> None:
-        # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not
-        # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!
-        if 'custom_base_class' in kwargs:
-            kwargs.pop('custom_base_class')
-
-        super().__init__(**kwargs)
-
-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
-        # PydanticV2 RootModels cannot have extra fields
-        return None
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/types.py 0.34.0-1/datamodel_code_generator/model/pydantic_v2/types.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/types.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/pydantic_v2/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,43 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar, Dict, Optional, Sequence, Type
-
-from datamodel_code_generator.format import DatetimeClassType
-from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
-from datamodel_code_generator.model.pydantic_v2.imports import (
-    IMPORT_AWARE_DATETIME,
-    IMPORT_NAIVE_DATETIME,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types
-
-
-class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] = 'pattern'
-
-    def type_map_factory(
-        self,
-        data_type: Type[DataType],
-        strict_types: Sequence[StrictTypes],
-        pattern_key: str,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ) -> Dict[Types, DataType]:
-        result = {
-            **super().type_map_factory(
-                data_type, strict_types, pattern_key, target_datetime_class
-            ),
-            Types.hostname: self.data_type.from_import(
-                IMPORT_CONSTR,
-                strict=StrictTypes.str in strict_types,
-                # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
-                kwargs={
-                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
-                    **({'strict': True} if StrictTypes.str in strict_types else {}),
-                },
-            ),
-        }
-        if target_datetime_class == DatetimeClassType.Awaredatetime:
-            result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
-        if target_datetime_class == DatetimeClassType.Naivedatetime:
-            result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
-        return result
diff -pruN 0.26.4-3/datamodel_code_generator/model/rootmodel.py 0.34.0-1/datamodel_code_generator/model/rootmodel.py
--- 0.26.4-3/datamodel_code_generator/model/rootmodel.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/rootmodel.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar
-
-from datamodel_code_generator.model import DataModel
-
-
-class RootModel(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'root.jinja2'
diff -pruN 0.26.4-3/datamodel_code_generator/model/scalar.py 0.34.0-1/datamodel_code_generator/model/scalar.py
--- 0.26.4-3/datamodel_code_generator/model/scalar.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/scalar.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,80 +0,0 @@
-from __future__ import annotations
-
-from collections import defaultdict
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference
-
-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BOOLEAN: str = 'bool'
-_STR: str = 'str'
-
-# default graphql scalar types
-DEFAULT_GRAPHQL_SCALAR_TYPE = _STR
-
-DEFAULT_GRAPHQL_SCALAR_TYPES: Dict[str, str] = {
-    'Boolean': _BOOLEAN,
-    'String': _STR,
-    'ID': _STR,
-    'Int': _INT,
-    'Float': _FLOAT,
-}
-
-
-class DataTypeScalar(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Scalar.jinja2'
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        extra_template_data = extra_template_data or defaultdict(dict)
-
-        scalar_name = reference.name
-        if scalar_name not in extra_template_data:
-            extra_template_data[scalar_name] = defaultdict(dict)
-
-        # py_type
-        py_type = extra_template_data[scalar_name].get(
-            'py_type',
-            DEFAULT_GRAPHQL_SCALAR_TYPES.get(
-                reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE
-            ),
-        )
-        extra_template_data[scalar_name]['py_type'] = py_type
-
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Enum.jinja2 0.34.0-1/datamodel_code_generator/model/template/Enum.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Enum.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/Enum.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- for field in fields %}
-    {{ field.name }} = {{ field.default }}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Scalar.jinja2 0.34.0-1/datamodel_code_generator/model/template/Scalar.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Scalar.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/Scalar.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-{{ class_name }}: TypeAlias = {{ py_type }}
-{%- if description %}
-"""
-{{ description }}
-"""
-{%- endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDict.jinja2 0.34.0-1/datamodel_code_generator/model/template/TypedDict.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDict.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/TypedDict.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-{%- if is_functional_syntax %}
-{% include 'TypedDictFunction.jinja2' %}
-{%- else %}
-{% include 'TypedDictClass.jinja2' %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDictClass.jinja2 0.34.0-1/datamodel_code_generator/model/template/TypedDictClass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDictClass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/TypedDictClass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-class {{ class_name }}({{ base_class }}):
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDictFunction.jinja2 0.34.0-1/datamodel_code_generator/model/template/TypedDictFunction.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDictFunction.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/TypedDictFunction.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,16 +0,0 @@
-{%- if description %}
-"""
-{{ description | indent(4) }}
-"""
-{%- endif %}
-{{ class_name }} = TypedDict('{{ class_name }}', {
-{%- for field in all_fields %}
-    '{{ field.key }}': {{ field.type_hint }},
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
-})
-
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Union.jinja2 0.34.0-1/datamodel_code_generator/model/template/Union.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Union.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/Union.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-{%- if description %}
-# {{ description }}
-{%- endif %}
-{%- if fields|length > 1 %}
-{{ class_name }}: TypeAlias = Union[
-{%- for field in fields %}
-    '{{ field.name }}',
-{%- endfor %}
-]{% else %}
-{{ class_name }}: TypeAlias = {{ fields[0].name }}{% endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/dataclass.jinja2 0.34.0-1/datamodel_code_generator/model/template/dataclass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/dataclass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,32 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-@dataclass{%- if keyword_only -%}(kw_only=True){%- endif %}
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/msgspec.jinja2 0.34.0-1/datamodel_code_generator/model/template/msgspec.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/msgspec.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/msgspec.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,42 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
-, {{ key }}={{ value }}
-{%- endfor -%}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated and not field.field %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- elif field.annotated and field.field %}
-    {{ field.name }}: {{ field.annotated }} = {{ field.field }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-
-
-
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'Config.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- for method in methods -%}
-    {{ method }}
-{%- endfor -%}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,36 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'Config.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- else %}
-    {%- set field = fields[0] %}
-    {%- if not field.annotated and field.field %}
-    __root__: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    __root__: {{ field.annotated }}
-    {%- else %}
-    __root__: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/Config.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic/Config.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/Config.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic/Config.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-class Config:
-{%- for field_name, value in config.dict(exclude_unset=True).items() %}
-    {{ field_name }} = {{ value }}
-{%- endfor %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic/dataclass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-@dataclass
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if field.default %}
-    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'ConfigDict.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none)) or field.data_type.is_optional
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- for method in methods -%}
-    {{ method }}
-{%- endfor -%}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-model_config = ConfigDict(
-{%- for field_name, value in config.dict(exclude_unset=True).items() %}
-    {{ field_name }}={{ value }},
-{%- endfor %}
-)
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,45 +0,0 @@
-{%- macro get_type_hint(_fields) -%}
-{%- if _fields -%}
-{#There will only ever be a single field for RootModel#}
-{{- _fields[0].type_hint}}
-{%- endif -%}
-{%- endmacro -%}
-
-
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-
-class {{ class_name }}({{ base_class }}{%- if fields -%}[{{get_type_hint(fields)}}]{%- endif -%}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'ConfigDict.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- else %}
-    {%- set field = fields[0] %}
-    {%- if not field.annotated and field.field %}
-    root: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    root: {{ field.annotated }}
-    {%- else %}
-    root: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/root.jinja2 0.34.0-1/datamodel_code_generator/model/template/root.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/root.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/template/root.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-{%- set field = fields[0] %}
-{%- if field.annotated %}
-{{ class_name }} = {{ field.annotated }}
-{%- else %}
-{{ class_name }} = {{ field.type_hint }}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/typed_dict.py 0.34.0-1/datamodel_code_generator/model/typed_dict.py
--- 0.26.4-3/datamodel_code_generator/model/typed_dict.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,155 +0,0 @@
-import keyword
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-)
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import (
-    IMPORT_NOT_REQUIRED,
-    IMPORT_NOT_REQUIRED_BACKPORT,
-    IMPORT_TYPED_DICT,
-    IMPORT_TYPED_DICT_BACKPORT,
-)
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
-
-escape_characters = str.maketrans(
-    {
-        '\\': r'\\',
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-def _is_valid_field_name(field: DataModelFieldBase) -> bool:
-    name = field.original_name or field.name
-    if name is None:  # pragma: no cover
-        return False
-    return name.isidentifier() and not keyword.iskeyword(name)
-
-
-class TypedDict(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'TypedDict.jinja2'
-    BASE_CLASS: ClassVar[str] = 'typing.TypedDict'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-    @property
-    def is_functional_syntax(self) -> bool:
-        return any(not _is_valid_field_name(f) for f in self.fields)
-
-    @property
-    def all_fields(self) -> Iterator[DataModelFieldBase]:
-        for base_class in self.base_classes:
-            if base_class.reference is None:  # pragma: no cover
-                continue
-            data_model = base_class.reference.source
-            if not isinstance(data_model, DataModel):  # pragma: no cover
-                continue
-
-            if isinstance(data_model, TypedDict):  # pragma: no cover
-                yield from data_model.all_fields
-
-        yield from self.fields
-
-    def render(self, *, class_name: Optional[str] = None) -> str:
-        response = self._render(
-            class_name=class_name or self.class_name,
-            fields=self.fields,
-            decorators=self.decorators,
-            base_class=self.base_class,
-            methods=self.methods,
-            description=self.description,
-            is_functional_syntax=self.is_functional_syntax,
-            all_fields=self.all_fields,
-            **self.extra_template_data,
-        )
-        return response
-
-
-class TypedDictBackport(TypedDict):
-    BASE_CLASS: ClassVar[str] = 'typing_extensions.TypedDict'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT_BACKPORT,)
-
-
-class DataModelField(DataModelFieldBase):
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
-
-    @property
-    def key(self) -> str:
-        return (self.original_name or self.name or '').translate(  # pragma: no cover
-            escape_characters
-        )
-
-    @property
-    def type_hint(self) -> str:
-        type_hint = super().type_hint
-        if self._not_required:
-            return f'{NOT_REQUIRED_PREFIX}{type_hint}]'
-        return type_hint
-
-    @property
-    def _not_required(self) -> bool:
-        return not self.required and isinstance(self.parent, TypedDict)
-
-    @property
-    def fall_back_to_nullable(self) -> bool:
-        return not self._not_required
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return (
-            *super().imports,
-            *(self.DEFAULT_IMPORTS if self._not_required else ()),
-        )
-
-
-class DataModelFieldBackport(DataModelField):
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
diff -pruN 0.26.4-3/datamodel_code_generator/model/types.py 0.34.0-1/datamodel_code_generator/model/types.py
--- 0.26.4-3/datamodel_code_generator/model/types.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,85 +0,0 @@
-from typing import Any, Dict, Optional, Sequence, Type
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ANY,
-    IMPORT_DECIMAL,
-    IMPORT_TIMEDELTA,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types
-from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
-
-
-def type_map_factory(data_type: Type[DataType]) -> Dict[Types, DataType]:
-    data_type_int = data_type(type='int')
-    data_type_float = data_type(type='float')
-    data_type_str = data_type(type='str')
-    return {
-        # TODO: Should we support a special type such UUID?
-        Types.integer: data_type_int,
-        Types.int32: data_type_int,
-        Types.int64: data_type_int,
-        Types.number: data_type_float,
-        Types.float: data_type_float,
-        Types.double: data_type_float,
-        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
-        Types.time: data_type_str,
-        Types.string: data_type_str,
-        Types.byte: data_type_str,  # base64 encoded string
-        Types.binary: data_type(type='bytes'),
-        Types.date: data_type_str,
-        Types.date_time: data_type_str,
-        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
-        Types.password: data_type_str,
-        Types.email: data_type_str,
-        Types.uuid: data_type_str,
-        Types.uuid1: data_type_str,
-        Types.uuid2: data_type_str,
-        Types.uuid3: data_type_str,
-        Types.uuid4: data_type_str,
-        Types.uuid5: data_type_str,
-        Types.uri: data_type_str,
-        Types.hostname: data_type_str,
-        Types.ipv4: data_type_str,
-        Types.ipv6: data_type_str,
-        Types.ipv4_network: data_type_str,
-        Types.ipv6_network: data_type_str,
-        Types.boolean: data_type(type='bool'),
-        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
-        Types.null: data_type(type='None'),
-        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
-        Types.any: data_type.from_import(IMPORT_ANY),
-    }
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        self.type_map: Dict[Types, DataType] = type_map_factory(self.data_type)
-
-    def get_data_type(
-        self,
-        types: Types,
-        **_: Any,
-    ) -> DataType:
-        return self.type_map[types]
diff -pruN 0.26.4-3/datamodel_code_generator/model/union.py 0.34.0-1/datamodel_code_generator/model/union.py
--- 0.26.4-3/datamodel_code_generator/model/union.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/model/union.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,51 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, IMPORT_UNION, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference
-
-
-class DataTypeUnion(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Union.jinja2'
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (
-        IMPORT_TYPE_ALIAS,
-        IMPORT_UNION,
-    )
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
diff -pruN 0.26.4-3/datamodel_code_generator/parser/__init__.py 0.34.0-1/datamodel_code_generator/parser/__init__.py
--- 0.26.4-3/datamodel_code_generator/parser/__init__.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.34.0-1/datamodel_code_generator/parser/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,33 +0,0 @@
-from __future__ import annotations
-
-from enum import Enum
-from typing import Callable, Dict, Optional, TypeVar
-
-TK = TypeVar('TK')
-TV = TypeVar('TV')
-
-
-class LiteralType(Enum):
-    All = 'all'
-    One = 'one'
-
-
-class DefaultPutDict(Dict[TK, TV]):
-    def get_or_put(
-        self,
-        key: TK,
-        default: Optional[TV] = None,
-        default_factory: Optional[Callable[[TK], TV]] = None,
-    ) -> TV:
-        if key in self:
-            return self[key]
-        elif default:  # pragma: no cover
-            value = self[key] = default
-            return value
-        elif default_factory:
-            value = self[key] = default_factory(key)
-            return value
-        raise ValueError('Not found default and default_factory')  # pragma: no cover
-
-
-__all__ = ['LiteralType']
diff -pruN 0.26.4-3/datamodel_code_generator/parser/base.py 0.34.0-1/datamodel_code_generator/parser/base.py
--- 0.26.4-3/datamodel_code_generator/parser/base.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/parser/base.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,1501 +0,0 @@
-import re
-import sys
-from abc import ABC, abstractmethod
-from collections import OrderedDict, defaultdict
-from itertools import groupby
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    NamedTuple,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-
-from pydantic import BaseModel
-
-from datamodel_code_generator.format import (
-    CodeFormatter,
-    DatetimeClassType,
-    PythonVersion,
-)
-from datamodel_code_generator.imports import (
-    IMPORT_ANNOTATIONS,
-    IMPORT_LITERAL,
-    IMPORT_LITERAL_BACKPORT,
-    Import,
-    Imports,
-)
-from datamodel_code_generator.model import dataclass as dataclass_model
-from datamodel_code_generator.model import msgspec as msgspec_model
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
-from datamodel_code_generator.model.base import (
-    ALL_MODEL,
-    UNDEFINED,
-    BaseClassDataType,
-    ConstraintsBase,
-    DataModel,
-    DataModelFieldBase,
-)
-from datamodel_code_generator.model.enum import Enum, Member
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.reference import ModelResolver, Reference
-from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes
-from datamodel_code_generator.util import Protocol, runtime_checkable
-
-SPECIAL_PATH_FORMAT: str = '#-datamodel-code-generator-#-{}-#-special-#'
-
-
-def get_special_path(keyword: str, path: List[str]) -> List[str]:
-    return [*path, SPECIAL_PATH_FORMAT.format(keyword)]
-
-
-escape_characters = str.maketrans(
-    {
-        '\\': r'\\',
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-def to_hashable(item: Any) -> Any:
-    if isinstance(
-        item,
-        (
-            list,
-            tuple,
-        ),
-    ):
-        return tuple(sorted(to_hashable(i) for i in item))
-    elif isinstance(item, dict):
-        return tuple(
-            sorted(
-                (
-                    k,
-                    to_hashable(v),
-                )
-                for k, v in item.items()
-            )
-        )
-    elif isinstance(item, set):  # pragma: no cover
-        return frozenset(to_hashable(i) for i in item)
-    elif isinstance(item, BaseModel):
-        return to_hashable(item.dict())
-    return item
-
-
-def dump_templates(templates: List[DataModel]) -> str:
-    return '\n\n\n'.join(str(m) for m in templates)
-
-
-ReferenceMapSet = Dict[str, Set[str]]
-SortedDataModels = Dict[str, DataModel]
-
-MAX_RECURSION_COUNT: int = sys.getrecursionlimit()
-
-
-def sort_data_models(
-    unsorted_data_models: List[DataModel],
-    sorted_data_models: Optional[SortedDataModels] = None,
-    require_update_action_models: Optional[List[str]] = None,
-    recursion_count: int = MAX_RECURSION_COUNT,
-) -> Tuple[List[DataModel], SortedDataModels, List[str]]:
-    if sorted_data_models is None:
-        sorted_data_models = OrderedDict()
-    if require_update_action_models is None:
-        require_update_action_models = []
-    sorted_model_count: int = len(sorted_data_models)
-
-    unresolved_references: List[DataModel] = []
-    for model in unsorted_data_models:
-        if not model.reference_classes:
-            sorted_data_models[model.path] = model
-        elif (
-            model.path in model.reference_classes and len(model.reference_classes) == 1
-        ):  # only self-referencing
-            sorted_data_models[model.path] = model
-            require_update_action_models.append(model.path)
-        elif (
-            not model.reference_classes - {model.path} - set(sorted_data_models)
-        ):  # reference classes have been resolved
-            sorted_data_models[model.path] = model
-            if model.path in model.reference_classes:
-                require_update_action_models.append(model.path)
-        else:
-            unresolved_references.append(model)
-    if unresolved_references:
-        if sorted_model_count != len(sorted_data_models) and recursion_count:
-            try:
-                return sort_data_models(
-                    unresolved_references,
-                    sorted_data_models,
-                    require_update_action_models,
-                    recursion_count - 1,
-                )
-            except RecursionError:  # pragma: no cover
-                pass
-
-        # sort on base_class dependency
-        while True:
-            ordered_models: List[Tuple[int, DataModel]] = []
-            unresolved_reference_model_names = [m.path for m in unresolved_references]
-            for model in unresolved_references:
-                indexes = [
-                    unresolved_reference_model_names.index(b.reference.path)
-                    for b in model.base_classes
-                    if b.reference
-                    and b.reference.path in unresolved_reference_model_names
-                ]
-                if indexes:
-                    ordered_models.append(
-                        (
-                            max(indexes),
-                            model,
-                        )
-                    )
-                else:
-                    ordered_models.append(
-                        (
-                            -1,
-                            model,
-                        )
-                    )
-            sorted_unresolved_models = [
-                m[1] for m in sorted(ordered_models, key=lambda m: m[0])
-            ]
-            if sorted_unresolved_models == unresolved_references:
-                break
-            unresolved_references = sorted_unresolved_models
-
-        # circular reference
-        unsorted_data_model_names = set(unresolved_reference_model_names)
-        for model in unresolved_references:
-            unresolved_model = (
-                model.reference_classes - {model.path} - set(sorted_data_models)
-            )
-            base_models = [
-                getattr(s.reference, 'path', None) for s in model.base_classes
-            ]
-            update_action_parent = set(require_update_action_models).intersection(
-                base_models
-            )
-            if not unresolved_model:
-                sorted_data_models[model.path] = model
-                if update_action_parent:
-                    require_update_action_models.append(model.path)
-                continue
-            if not unresolved_model - unsorted_data_model_names:
-                sorted_data_models[model.path] = model
-                require_update_action_models.append(model.path)
-                continue
-            # unresolved
-            unresolved_classes = ', '.join(
-                f'[class: {item.path} references: {item.reference_classes}]'
-                for item in unresolved_references
-            )
-            raise Exception(f'A Parser can not resolve classes: {unresolved_classes}.')
-    return unresolved_references, sorted_data_models, require_update_action_models
-
-
-def relative(current_module: str, reference: str) -> Tuple[str, str]:
-    """Find relative module path."""
-
-    current_module_path = current_module.split('.') if current_module else []
-    *reference_path, name = reference.split('.')
-
-    if current_module_path == reference_path:
-        return '', ''
-
-    i = 0
-    for x, y in zip(current_module_path, reference_path):
-        if x != y:
-            break
-        i += 1
-
-    left = '.' * (len(current_module_path) - i)
-    right = '.'.join(reference_path[i:])
-
-    if not left:
-        left = '.'
-    if not right:
-        right = name
-    elif '.' in right:
-        extra, right = right.rsplit('.', 1)
-        left += extra
-
-    return left, right
-
-
-def exact_import(from_: str, import_: str, short_name: str) -> Tuple[str, str]:
-    if from_ == len(from_) * '.':
-        # Prevents "from . import foo" becoming "from ..foo import Foo"
-        # or "from .. import foo" becoming "from ...foo import Foo"
-        # when our imported module has the same parent
-        return f'{from_}{import_}', short_name
-    return f'{from_}.{import_}', short_name
-
-
-@runtime_checkable
-class Child(Protocol):
-    @property
-    def parent(self) -> Optional[Any]:
-        raise NotImplementedError
-
-
-T = TypeVar('T')
-
-
-def get_most_of_parent(value: Any, type_: Optional[Type[T]] = None) -> Optional[T]:
-    if isinstance(value, Child) and (type_ is None or not isinstance(value, type_)):
-        return get_most_of_parent(value.parent, type_)
-    return value
-
-
-def title_to_class_name(title: str) -> str:
-    classname = re.sub('[^A-Za-z0-9]+', ' ', title)
-    classname = ''.join(x for x in classname.title() if not x.isspace())
-    return classname
-
-
-def _find_base_classes(model: DataModel) -> List[DataModel]:
-    return [
-        b.reference.source
-        for b in model.base_classes
-        if b.reference and isinstance(b.reference.source, DataModel)
-    ]
-
-
-def _find_field(
-    original_name: str, models: List[DataModel]
-) -> Optional[DataModelFieldBase]:
-    def _find_field_and_base_classes(
-        model_: DataModel,
-    ) -> Tuple[Optional[DataModelFieldBase], List[DataModel]]:
-        for field_ in model_.fields:
-            if field_.original_name == original_name:
-                return field_, []
-        return None, _find_base_classes(model_)  # pragma: no cover
-
-    for model in models:
-        field, base_models = _find_field_and_base_classes(model)
-        if field:
-            return field
-        models.extend(base_models)  # pragma: no cover
-
-    return None  # pragma: no cover
-
-
-def _copy_data_types(data_types: List[DataType]) -> List[DataType]:
-    copied_data_types: List[DataType] = []
-    for data_type_ in data_types:
-        if data_type_.reference:
-            copied_data_types.append(
-                data_type_.__class__(reference=data_type_.reference)
-            )
-        elif data_type_.data_types:  # pragma: no cover
-            copied_data_type = data_type_.copy()
-            copied_data_type.data_types = _copy_data_types(data_type_.data_types)
-            copied_data_types.append(copied_data_type)
-        else:
-            copied_data_types.append(data_type_.copy())
-    return copied_data_types
-
-
-class Result(BaseModel):
-    body: str
-    source: Optional[Path] = None
-
-
-class Source(BaseModel):
-    path: Path
-    text: str
-
-    @classmethod
-    def from_path(cls, path: Path, base_path: Path, encoding: str) -> 'Source':
-        return cls(
-            path=path.relative_to(base_path),
-            text=path.read_text(encoding=encoding),
-        )
-
-
-class Parser(ABC):
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[
-            Callable[[str], str]
-        ] = title_to_class_name,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        use_one_literal_as_default: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        self.keyword_only = keyword_only
-        self.data_type_manager: DataTypeManager = data_type_manager_type(
-            python_version=target_python_version,
-            use_standard_collections=use_standard_collections,
-            use_generic_container_types=use_generic_container_types,
-            strict_types=strict_types,
-            use_union_operator=use_union_operator,
-            use_pendulum=use_pendulum,
-            target_datetime_class=target_datetime_class,
-        )
-        self.data_model_type: Type[DataModel] = data_model_type
-        self.data_model_root_type: Type[DataModel] = data_model_root_type
-        self.data_model_field_type: Type[DataModelFieldBase] = data_model_field_type
-
-        self.imports: Imports = Imports(use_exact_imports)
-        self.use_exact_imports: bool = use_exact_imports
-        self._append_additional_imports(additional_imports=additional_imports)
-
-        self.base_class: Optional[str] = base_class
-        self.target_python_version: PythonVersion = target_python_version
-        self.results: List[DataModel] = []
-        self.dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = (
-            dump_resolve_reference_action
-        )
-        self.validation: bool = validation
-        self.field_constraints: bool = field_constraints
-        self.snake_case_field: bool = snake_case_field
-        self.strip_default_none: bool = strip_default_none
-        self.apply_default_values_for_required_fields: bool = (
-            apply_default_values_for_required_fields
-        )
-        self.force_optional_for_required_fields: bool = (
-            force_optional_for_required_fields
-        )
-        self.use_schema_description: bool = use_schema_description
-        self.use_field_description: bool = use_field_description
-        self.use_default_kwarg: bool = use_default_kwarg
-        self.reuse_model: bool = reuse_model
-        self.encoding: str = encoding
-        self.enum_field_as_literal: Optional[LiteralType] = enum_field_as_literal
-        self.set_default_enum_member: bool = set_default_enum_member
-        self.use_subclass_enum: bool = use_subclass_enum
-        self.strict_nullable: bool = strict_nullable
-        self.use_generic_container_types: bool = use_generic_container_types
-        self.use_union_operator: bool = use_union_operator
-        self.enable_faux_immutability: bool = enable_faux_immutability
-        self.custom_class_name_generator: Optional[Callable[[str], str]] = (
-            custom_class_name_generator
-        )
-        self.field_extra_keys: Set[str] = field_extra_keys or set()
-        self.field_extra_keys_without_x_prefix: Set[str] = (
-            field_extra_keys_without_x_prefix or set()
-        )
-        self.field_include_all_keys: bool = field_include_all_keys
-
-        self.remote_text_cache: DefaultPutDict[str, str] = (
-            remote_text_cache or DefaultPutDict()
-        )
-        self.current_source_path: Optional[Path] = None
-        self.use_title_as_name: bool = use_title_as_name
-        self.use_operation_id_as_name: bool = use_operation_id_as_name
-        self.use_unique_items_as_set: bool = use_unique_items_as_set
-
-        if base_path:
-            self.base_path = base_path
-        elif isinstance(source, Path):
-            self.base_path = (
-                source.absolute() if source.is_dir() else source.absolute().parent
-            )
-        else:
-            self.base_path = Path.cwd()
-
-        self.source: Union[str, Path, List[Path], ParseResult] = source
-        self.custom_template_dir = custom_template_dir
-        self.extra_template_data: DefaultDict[str, Any] = (
-            extra_template_data or defaultdict(dict)
-        )
-
-        if allow_population_by_field_name:
-            self.extra_template_data[ALL_MODEL]['allow_population_by_field_name'] = True
-
-        if allow_extra_fields:
-            self.extra_template_data[ALL_MODEL]['allow_extra_fields'] = True
-
-        if enable_faux_immutability:
-            self.extra_template_data[ALL_MODEL]['allow_mutation'] = False
-
-        self.model_resolver = ModelResolver(
-            base_url=source.geturl() if isinstance(source, ParseResult) else None,
-            singular_name_suffix='' if disable_appending_item_suffix else None,
-            aliases=aliases,
-            empty_field_name=empty_enum_field_name,
-            snake_case_field=snake_case_field,
-            custom_class_name_generator=custom_class_name_generator,
-            base_path=self.base_path,
-            original_field_name_delimiter=original_field_name_delimiter,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            no_alias=no_alias,
-        )
-        self.class_name: Optional[str] = class_name
-        self.wrap_string_literal: Optional[bool] = wrap_string_literal
-        self.http_headers: Optional[Sequence[Tuple[str, str]]] = http_headers
-        self.http_query_parameters: Optional[Sequence[Tuple[str, str]]] = (
-            http_query_parameters
-        )
-        self.http_ignore_tls: bool = http_ignore_tls
-        self.use_annotated: bool = use_annotated
-        if self.use_annotated and not self.field_constraints:  # pragma: no cover
-            raise Exception(
-                '`use_annotated=True` has to be used with `field_constraints=True`'
-            )
-        self.use_non_positive_negative_number_constrained_types = (
-            use_non_positive_negative_number_constrained_types
-        )
-        self.use_double_quotes = use_double_quotes
-        self.allow_responses_without_content = allow_responses_without_content
-        self.collapse_root_models = collapse_root_models
-        self.capitalise_enum_members = capitalise_enum_members
-        self.keep_model_order = keep_model_order
-        self.use_one_literal_as_default = use_one_literal_as_default
-        self.known_third_party = known_third_party
-        self.custom_formatter = custom_formatters
-        self.custom_formatters_kwargs = custom_formatters_kwargs
-        self.treat_dots_as_module = treat_dots_as_module
-        self.default_field_extras: Optional[Dict[str, Any]] = default_field_extras
-
-    @property
-    def iter_source(self) -> Iterator[Source]:
-        if isinstance(self.source, str):
-            yield Source(path=Path(), text=self.source)
-        elif isinstance(self.source, Path):  # pragma: no cover
-            if self.source.is_dir():
-                for path in sorted(self.source.rglob('*'), key=lambda p: p.name):
-                    if path.is_file():
-                        yield Source.from_path(path, self.base_path, self.encoding)
-            else:
-                yield Source.from_path(self.source, self.base_path, self.encoding)
-        elif isinstance(self.source, list):  # pragma: no cover
-            for path in self.source:
-                yield Source.from_path(path, self.base_path, self.encoding)
-        else:
-            yield Source(
-                path=Path(self.source.path),
-                text=self.remote_text_cache.get_or_put(
-                    self.source.geturl(), default_factory=self._get_text_from_url
-                ),
-            )
-
-    def _append_additional_imports(
-        self, additional_imports: Optional[List[str]]
-    ) -> None:
-        if additional_imports is None:
-            additional_imports = []
-
-        for additional_import_string in additional_imports:
-            if additional_import_string is None:
-                continue
-            new_import = Import.from_full_path(additional_import_string)
-            self.imports.append(new_import)
-
-    def _get_text_from_url(self, url: str) -> str:
-        from datamodel_code_generator.http import get_body
-
-        return self.remote_text_cache.get_or_put(
-            url,
-            default_factory=lambda url_: get_body(
-                url, self.http_headers, self.http_ignore_tls, self.http_query_parameters
-            ),
-        )
-
-    @classmethod
-    def get_url_path_parts(cls, url: ParseResult) -> List[str]:
-        return [
-            f'{url.scheme}://{url.hostname}',
-            *url.path.split('/')[1:],
-        ]
-
-    @property
-    def data_type(self) -> Type[DataType]:
-        return self.data_type_manager.data_type
-
-    @abstractmethod
-    def parse_raw(self) -> None:
-        raise NotImplementedError
-
-    def __delete_duplicate_models(self, models: List[DataModel]) -> None:
-        model_class_names: Dict[str, DataModel] = {}
-        model_to_duplicate_models: DefaultDict[DataModel, List[DataModel]] = (
-            defaultdict(list)
-        )
-        for model in models[:]:
-            if isinstance(model, self.data_model_root_type):
-                root_data_type = model.fields[0].data_type
-
-                # backward compatible
-                # Remove duplicated root model
-                if (
-                    root_data_type.reference
-                    and not root_data_type.is_dict
-                    and not root_data_type.is_list
-                    and root_data_type.reference.source in models
-                    and root_data_type.reference.name
-                    == self.model_resolver.get_class_name(
-                        model.reference.original_name, unique=False
-                    ).name
-                ):
-                    # Replace referenced duplicate model to original model
-                    for child in model.reference.children[:]:
-                        child.replace_reference(root_data_type.reference)
-                    models.remove(model)
-                    for data_type in model.all_data_types:
-                        if data_type.reference:
-                            data_type.remove_reference()
-                    continue
-
-                #  Custom root model can't be inherited on restriction of Pydantic
-                for child in model.reference.children:
-                    # inheritance model
-                    if isinstance(child, DataModel):
-                        for base_class in child.base_classes[:]:
-                            if base_class.reference == model.reference:
-                                child.base_classes.remove(base_class)
-                        if not child.base_classes:  # pragma: no cover
-                            child.set_base_class()
-
-            class_name = model.duplicate_class_name or model.class_name
-            if class_name in model_class_names:
-                model_key = tuple(
-                    to_hashable(v)
-                    for v in (
-                        model.render(class_name=model.duplicate_class_name),
-                        model.imports,
-                    )
-                )
-                original_model = model_class_names[class_name]
-                original_model_key = tuple(
-                    to_hashable(v)
-                    for v in (
-                        original_model.render(
-                            class_name=original_model.duplicate_class_name
-                        ),
-                        original_model.imports,
-                    )
-                )
-                if model_key == original_model_key:
-                    model_to_duplicate_models[original_model].append(model)
-                    continue
-            model_class_names[class_name] = model
-        for model, duplicate_models in model_to_duplicate_models.items():
-            for duplicate_model in duplicate_models:
-                for child in duplicate_model.reference.children[:]:
-                    child.replace_reference(model.reference)
-                models.remove(duplicate_model)
-
-    @classmethod
-    def __replace_duplicate_name_in_module(cls, models: List[DataModel]) -> None:
-        scoped_model_resolver = ModelResolver(
-            exclude_names={i.alias or i.import_ for m in models for i in m.imports},
-            duplicate_name_suffix='Model',
-        )
-
-        model_names: Dict[str, DataModel] = {}
-        for model in models:
-            class_name: str = model.class_name
-            generated_name: str = scoped_model_resolver.add(
-                [model.path], class_name, unique=True, class_name=True
-            ).name
-            if class_name != generated_name:
-                model.class_name = generated_name
-            model_names[model.class_name] = model
-
-        for model in models:
-            duplicate_name = model.duplicate_class_name
-            # check only first desired name
-            if duplicate_name and duplicate_name not in model_names:
-                del model_names[model.class_name]
-                model.class_name = duplicate_name
-                model_names[duplicate_name] = model
-
-    def __change_from_import(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-        init: bool,
-    ) -> None:
-        for model in models:
-            scoped_model_resolver.add([model.path], model.class_name)
-        for model in models:
-            before_import = model.imports
-            imports.append(before_import)
-            for data_type in model.all_data_types:
-                # To change from/import
-
-                if not data_type.reference or data_type.reference.source in models:
-                    # No need to import non-reference model.
-                    # Or, Referenced model is in the same file. we don't need to import the model
-                    continue
-
-                if isinstance(data_type, BaseClassDataType):
-                    left, right = relative(model.module_name, data_type.full_name)
-                    from_ = (
-                        ''.join([left, right])
-                        if left.endswith('.')
-                        else '.'.join([left, right])
-                    )
-                    import_ = data_type.reference.short_name
-                    full_path = from_, import_
-                else:
-                    from_, import_ = full_path = relative(
-                        model.module_name, data_type.full_name
-                    )
-                    if imports.use_exact:  # pragma: no cover
-                        from_, import_ = exact_import(
-                            from_, import_, data_type.reference.short_name
-                        )
-                    import_ = import_.replace('-', '_')
-                    if (
-                        len(model.module_path) > 1
-                        and model.module_path[-1].count('.') > 0
-                        and not self.treat_dots_as_module
-                    ):
-                        rel_path_depth = model.module_path[-1].count('.')
-                        from_ = from_[rel_path_depth:]
-
-                alias = scoped_model_resolver.add(full_path, import_).name
-
-                name = data_type.reference.short_name
-                if from_ and import_ and alias != name:
-                    data_type.alias = (
-                        alias
-                        if data_type.reference.short_name == import_
-                        else f'{alias}.{name}'
-                    )
-
-                if init:
-                    from_ = '.' + from_
-                imports.append(
-                    Import(
-                        from_=from_,
-                        import_=import_,
-                        alias=alias,
-                        reference_path=data_type.reference.path,
-                    ),
-                )
-            after_import = model.imports
-            if before_import != after_import:
-                imports.append(after_import)
-
-    @classmethod
-    def __extract_inherited_enum(cls, models: List[DataModel]) -> None:
-        for model in models[:]:
-            if model.fields:
-                continue
-            enums: List[Enum] = []
-            for base_model in model.base_classes:
-                if not base_model.reference:
-                    continue
-                source_model = base_model.reference.source
-                if isinstance(source_model, Enum):
-                    enums.append(source_model)
-            if enums:
-                models.insert(
-                    models.index(model),
-                    enums[0].__class__(
-                        fields=[f for e in enums for f in e.fields],
-                        description=model.description,
-                        reference=model.reference,
-                    ),
-                )
-                models.remove(model)
-
-    def __apply_discriminator_type(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-    ) -> None:
-        for model in models:
-            for field in model.fields:
-                discriminator = field.extras.get('discriminator')
-                if not discriminator or not isinstance(discriminator, dict):
-                    continue
-                property_name = discriminator.get('propertyName')
-                if not property_name:  # pragma: no cover
-                    continue
-                mapping = discriminator.get('mapping', {})
-                for data_type in field.data_type.data_types:
-                    if not data_type.reference:  # pragma: no cover
-                        continue
-                    discriminator_model = data_type.reference.source
-
-                    if not isinstance(  # pragma: no cover
-                        discriminator_model,
-                        (
-                            pydantic_model.BaseModel,
-                            pydantic_model_v2.BaseModel,
-                            dataclass_model.DataClass,
-                            msgspec_model.Struct,
-                        ),
-                    ):
-                        continue  # pragma: no cover
-
-                    type_names: List[str] = []
-
-                    def check_paths(
-                        model: Union[
-                            pydantic_model.BaseModel,
-                            pydantic_model_v2.BaseModel,
-                            Reference,
-                        ],
-                        mapping: Dict[str, str],
-                        type_names: List[str] = type_names,
-                    ) -> None:
-                        """Helper function to validate paths for a given model."""
-                        for name, path in mapping.items():
-                            if (
-                                model.path.split('#/')[-1] != path.split('#/')[-1]
-                            ) and (
-                                path.startswith('#/')
-                                or model.path[:-1] != path.split('/')[-1]
-                            ):
-                                t_path = path[str(path).find('/') + 1 :]
-                                t_disc = model.path[: str(model.path).find('#')].lstrip(
-                                    '../'
-                                )
-                                t_disc_2 = '/'.join(t_disc.split('/')[1:])
-                                if t_path != t_disc and t_path != t_disc_2:
-                                    continue
-                            type_names.append(name)
-
-                    # Check the main discriminator model path
-                    if mapping:
-                        check_paths(discriminator_model, mapping)
-
-                        # Check the base_classes if they exist
-                        if len(type_names) == 0:
-                            for base_class in discriminator_model.base_classes:
-                                check_paths(base_class.reference, mapping)
-                    else:
-                        type_names = [discriminator_model.path.split('/')[-1]]
-                    if not type_names:  # pragma: no cover
-                        raise RuntimeError(
-                            f'Discriminator type is not found. {data_type.reference.path}'
-                        )
-                    has_one_literal = False
-                    for discriminator_field in discriminator_model.fields:
-                        if (
-                            discriminator_field.original_name
-                            or discriminator_field.name
-                        ) != property_name:
-                            continue
-                        literals = discriminator_field.data_type.literals
-                        if len(literals) == 1 and literals[0] == (
-                            type_names[0] if type_names else None
-                        ):
-                            has_one_literal = True
-                            if isinstance(
-                                discriminator_model, msgspec_model.Struct
-                            ):  # pragma: no cover
-                                discriminator_model.add_base_class_kwarg(
-                                    'tag_field', f"'{property_name}'"
-                                )
-                                discriminator_model.add_base_class_kwarg(
-                                    'tag', discriminator_field.represented_default
-                                )
-                                discriminator_field.extras['is_classvar'] = True
-                            # Found the discriminator field, no need to keep looking
-                            break
-                        for (
-                            field_data_type
-                        ) in discriminator_field.data_type.all_data_types:
-                            if field_data_type.reference:  # pragma: no cover
-                                field_data_type.remove_reference()
-                        discriminator_field.data_type = self.data_type(
-                            literals=type_names
-                        )
-                        discriminator_field.data_type.parent = discriminator_field
-                        discriminator_field.required = True
-                        imports.append(discriminator_field.imports)
-                        has_one_literal = True
-                    if not has_one_literal:
-                        discriminator_model.fields.append(
-                            self.data_model_field_type(
-                                name=property_name,
-                                data_type=self.data_type(literals=type_names),
-                                required=True,
-                            )
-                        )
-                    literal = (
-                        IMPORT_LITERAL
-                        if self.target_python_version.has_literal_type
-                        else IMPORT_LITERAL_BACKPORT
-                    )
-                    has_imported_literal = any(
-                        literal == import_  # type: ignore [comparison-overlap]
-                        for import_ in imports
-                    )
-                    if has_imported_literal:  # pragma: no cover
-                        imports.append(literal)
-
-    @classmethod
-    def _create_set_from_list(cls, data_type: DataType) -> Optional[DataType]:
-        if data_type.is_list:
-            new_data_type = data_type.copy()
-            new_data_type.is_list = False
-            new_data_type.is_set = True
-            for data_type_ in new_data_type.data_types:
-                data_type_.parent = new_data_type
-            return new_data_type
-        elif data_type.data_types:  # pragma: no cover
-            for index, nested_data_type in enumerate(data_type.data_types[:]):
-                set_data_type = cls._create_set_from_list(nested_data_type)
-                if set_data_type:  # pragma: no cover
-                    data_type.data_types[index] = set_data_type
-            return data_type
-        return None  # pragma: no cover
-
-    def __replace_unique_list_to_set(self, models: List[DataModel]) -> None:
-        for model in models:
-            for model_field in model.fields:
-                if not self.use_unique_items_as_set:
-                    continue
-
-                if not (
-                    model_field.constraints and model_field.constraints.unique_items
-                ):
-                    continue
-                set_data_type = self._create_set_from_list(model_field.data_type)
-                if set_data_type:  # pragma: no cover
-                    model_field.data_type.parent = None
-                    model_field.data_type = set_data_type
-                    set_data_type.parent = model_field
-
-    @classmethod
-    def __set_reference_default_value_to_field(cls, models: List[DataModel]) -> None:
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.data_type.reference or model_field.has_default:
-                    continue
-                if isinstance(
-                    model_field.data_type.reference.source, DataModel
-                ):  # pragma: no cover
-                    if model_field.data_type.reference.source.default != UNDEFINED:
-                        model_field.default = (
-                            model_field.data_type.reference.source.default
-                        )
-
-    def __reuse_model(
-        self, models: List[DataModel], require_update_action_models: List[str]
-    ) -> None:
-        if not self.reuse_model:
-            return None
-        model_cache: Dict[Tuple[str, ...], Reference] = {}
-        duplicates = []
-        for model in models[:]:
-            model_key = tuple(
-                to_hashable(v) for v in (model.render(class_name='M'), model.imports)
-            )
-            cached_model_reference = model_cache.get(model_key)
-            if cached_model_reference:
-                if isinstance(model, Enum):
-                    for child in model.reference.children[:]:
-                        # child is resolved data_type by reference
-                        data_model = get_most_of_parent(child)
-                        # TODO: replace reference in all modules
-                        if data_model in models:  # pragma: no cover
-                            child.replace_reference(cached_model_reference)
-                    duplicates.append(model)
-                else:
-                    index = models.index(model)
-                    inherited_model = model.__class__(
-                        fields=[],
-                        base_classes=[cached_model_reference],
-                        description=model.description,
-                        reference=Reference(
-                            name=model.name,
-                            path=model.reference.path + '/reuse',
-                        ),
-                        custom_template_dir=model._custom_template_dir,
-                    )
-                    if cached_model_reference.path in require_update_action_models:
-                        require_update_action_models.append(inherited_model.path)
-                    models.insert(index, inherited_model)
-                    models.remove(model)
-
-            else:
-                model_cache[model_key] = model.reference
-
-        for duplicate in duplicates:
-            models.remove(duplicate)
-
-    def __collapse_root_models(
-        self,
-        models: List[DataModel],
-        unused_models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-    ) -> None:
-        if not self.collapse_root_models:
-            return None
-
-        for model in models:
-            for model_field in model.fields:
-                for data_type in model_field.data_type.all_data_types:
-                    reference = data_type.reference
-                    if not reference or not isinstance(
-                        reference.source, self.data_model_root_type
-                    ):
-                        continue
-
-                    # Use root-type as model_field type
-                    root_type_model = reference.source
-                    root_type_field = root_type_model.fields[0]
-
-                    if (
-                        self.field_constraints
-                        and isinstance(root_type_field.constraints, ConstraintsBase)
-                        and root_type_field.constraints.has_constraints
-                        and any(
-                            d
-                            for d in model_field.data_type.all_data_types
-                            if d.is_dict or d.is_union
-                        )
-                    ):
-                        continue  # pragma: no cover
-
-                    # set copied data_type
-                    copied_data_type = root_type_field.data_type.copy()
-                    if isinstance(data_type.parent, self.data_model_field_type):
-                        # for field
-                        # override empty field by root-type field
-                        model_field.extras = {
-                            **root_type_field.extras,
-                            **model_field.extras,
-                        }
-                        model_field.process_const()
-
-                        if self.field_constraints:
-                            model_field.constraints = ConstraintsBase.merge_constraints(
-                                root_type_field.constraints, model_field.constraints
-                            )
-
-                        data_type.parent.data_type = copied_data_type
-
-                    elif data_type.parent.is_list:
-                        if self.field_constraints:
-                            model_field.constraints = ConstraintsBase.merge_constraints(
-                                root_type_field.constraints, model_field.constraints
-                            )
-                        if isinstance(
-                            root_type_field,
-                            pydantic_model.DataModelField,
-                        ) and not model_field.extras.get('discriminator'):
-                            discriminator = root_type_field.extras.get('discriminator')
-                            if discriminator:
-                                model_field.extras['discriminator'] = discriminator
-                        data_type.parent.data_types.remove(
-                            data_type
-                        )  # pragma: no cover
-                        data_type.parent.data_types.append(copied_data_type)
-
-                    elif isinstance(data_type.parent, DataType):
-                        # for data_type
-                        data_type_id = id(data_type)
-                        data_type.parent.data_types = [
-                            d
-                            for d in (*data_type.parent.data_types, copied_data_type)
-                            if id(d) != data_type_id
-                        ]
-                    else:  # pragma: no cover
-                        continue
-
-                    for d in root_type_field.data_type.data_types:
-                        if d.reference is None:
-                            continue
-                        from_, import_ = full_path = relative(
-                            model.module_name, d.full_name
-                        )
-                        if from_ and import_:
-                            alias = scoped_model_resolver.add(full_path, import_)
-                            d.alias = (
-                                alias.name
-                                if d.reference.short_name == import_
-                                else f'{alias.name}.{d.reference.short_name}'
-                            )
-                            imports.append(
-                                [
-                                    Import(
-                                        from_=from_,
-                                        import_=import_,
-                                        alias=alias.name,
-                                        reference_path=d.reference.path,
-                                    )
-                                ]
-                            )
-
-                    original_field = get_most_of_parent(data_type, DataModelFieldBase)
-                    if original_field:  # pragma: no cover
-                        # TODO: Improve detection of reference type
-                        imports.append(original_field.imports)
-
-                    data_type.remove_reference()
-
-                    root_type_model.reference.children = [
-                        c
-                        for c in root_type_model.reference.children
-                        if getattr(c, 'parent', None)
-                    ]
-
-                    imports.remove_referenced_imports(root_type_model.path)
-                    if not root_type_model.reference.children:
-                        unused_models.append(root_type_model)
-
-    def __set_default_enum_member(
-        self,
-        models: List[DataModel],
-    ) -> None:
-        if not self.set_default_enum_member:
-            return None
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.default:
-                    continue
-                for data_type in model_field.data_type.all_data_types:
-                    if data_type.reference and isinstance(
-                        data_type.reference.source, Enum
-                    ):  # pragma: no cover
-                        if isinstance(model_field.default, list):
-                            enum_member: Union[List[Member], Optional[Member]] = [
-                                e
-                                for e in (
-                                    data_type.reference.source.find_member(d)
-                                    for d in model_field.default
-                                )
-                                if e
-                            ]
-                        else:
-                            enum_member = data_type.reference.source.find_member(
-                                model_field.default
-                            )
-                        if not enum_member:
-                            continue
-                        model_field.default = enum_member
-                        if data_type.alias:
-                            if isinstance(enum_member, list):
-                                for enum_member_ in enum_member:
-                                    enum_member_.alias = data_type.alias
-                            else:
-                                enum_member.alias = data_type.alias
-
-    def __override_required_field(
-        self,
-        models: List[DataModel],
-    ) -> None:
-        for model in models:
-            if isinstance(model, (Enum, self.data_model_root_type)):
-                continue
-            for index, model_field in enumerate(model.fields[:]):
-                data_type = model_field.data_type
-                if (
-                    not model_field.original_name
-                    or data_type.data_types
-                    or data_type.reference
-                    or data_type.type
-                    or data_type.literals
-                    or data_type.dict_key
-                ):
-                    continue
-
-                original_field = _find_field(
-                    model_field.original_name, _find_base_classes(model)
-                )
-                if not original_field:  # pragma: no cover
-                    model.fields.remove(model_field)
-                    continue
-                copied_original_field = original_field.copy()
-                if original_field.data_type.reference:
-                    data_type = self.data_type_manager.data_type(
-                        reference=original_field.data_type.reference,
-                    )
-                elif original_field.data_type.data_types:
-                    data_type = original_field.data_type.copy()
-                    data_type.data_types = _copy_data_types(
-                        original_field.data_type.data_types
-                    )
-                    for data_type_ in data_type.data_types:
-                        data_type_.parent = data_type
-                else:
-                    data_type = original_field.data_type.copy()
-                data_type.parent = copied_original_field
-                copied_original_field.data_type = data_type
-                copied_original_field.parent = model
-                copied_original_field.required = True
-                model.fields.insert(index, copied_original_field)
-                model.fields.remove(model_field)
-
-    def __sort_models(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-    ) -> None:
-        if not self.keep_model_order:
-            return
-
-        models.sort(key=lambda x: x.class_name)
-
-        imported = {i for v in imports.values() for i in v}
-        model_class_name_baseclasses: Dict[DataModel, Tuple[str, Set[str]]] = {}
-        for model in models:
-            class_name = model.class_name
-            model_class_name_baseclasses[model] = (
-                class_name,
-                {b.type_hint for b in model.base_classes if b.reference} - {class_name},
-            )
-
-        changed: bool = True
-        while changed:
-            changed = False
-            resolved = imported.copy()
-            for i in range(len(models) - 1):
-                model = models[i]
-                class_name, baseclasses = model_class_name_baseclasses[model]
-                if not baseclasses - resolved:
-                    resolved.add(class_name)
-                    continue
-                models[i], models[i + 1] = models[i + 1], model
-                changed = True
-
-    def __set_one_literal_on_default(self, models: List[DataModel]) -> None:
-        if not self.use_one_literal_as_default:
-            return None
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.required or len(model_field.data_type.literals) != 1:
-                    continue
-                model_field.default = model_field.data_type.literals[0]
-                model_field.required = False
-                if model_field.nullable is not True:  # pragma: no cover
-                    model_field.nullable = False
-
-    @classmethod
-    def __postprocess_result_modules(cls, results):
-        def process(input_tuple) -> Tuple[str, ...]:
-            r = []
-            for item in input_tuple:
-                p = item.split('.')
-                if len(p) > 1:
-                    r.extend(p[:-1])
-                    r.append(p[-1])
-                else:
-                    r.append(item)
-
-            r = r[:-2] + [f'{r[-2]}.{r[-1]}']
-            return tuple(r)
-
-        results = {process(k): v for k, v in results.items()}
-
-        init_result = [v for k, v in results.items() if k[-1] == '__init__.py'][0]
-        folders = {t[:-1] if t[-1].endswith('.py') else t for t in results.keys()}
-        for folder in folders:
-            for i in range(len(folder)):
-                subfolder = folder[: i + 1]
-                init_file = subfolder + ('__init__.py',)
-                results.update({init_file: init_result})
-        return results
-
-    def __change_imported_model_name(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-    ) -> None:
-        imported_names = {
-            imports.alias[from_][i]
-            if i in imports.alias[from_] and i != imports.alias[from_][i]
-            else i
-            for from_, import_ in imports.items()
-            for i in import_
-        }
-        for model in models:
-            if model.class_name not in imported_names:  # pragma: no cover
-                continue
-
-            model.reference.name = scoped_model_resolver.add(  # pragma: no cover
-                path=get_special_path('imported_name', model.path.split('/')),
-                original_name=model.reference.name,
-                unique=True,
-                class_name=True,
-            ).name
-
-    def parse(
-        self,
-        with_import: Optional[bool] = True,
-        format_: Optional[bool] = True,
-        settings_path: Optional[Path] = None,
-    ) -> Union[str, Dict[Tuple[str, ...], Result]]:
-        self.parse_raw()
-
-        if with_import:
-            if self.target_python_version != PythonVersion.PY_36:
-                self.imports.append(IMPORT_ANNOTATIONS)
-
-        if format_:
-            code_formatter: Optional[CodeFormatter] = CodeFormatter(
-                self.target_python_version,
-                settings_path,
-                self.wrap_string_literal,
-                skip_string_normalization=not self.use_double_quotes,
-                known_third_party=self.known_third_party,
-                custom_formatters=self.custom_formatter,
-                custom_formatters_kwargs=self.custom_formatters_kwargs,
-            )
-        else:
-            code_formatter = None
-
-        _, sorted_data_models, require_update_action_models = sort_data_models(
-            self.results
-        )
-
-        results: Dict[Tuple[str, ...], Result] = {}
-
-        def module_key(data_model: DataModel) -> Tuple[str, ...]:
-            return tuple(data_model.module_path)
-
-        def sort_key(data_model: DataModel) -> Tuple[int, Tuple[str, ...]]:
-            return (len(data_model.module_path), tuple(data_model.module_path))
-
-        # process in reverse order to correctly establish module levels
-        grouped_models = groupby(
-            sorted(sorted_data_models.values(), key=sort_key, reverse=True),
-            key=module_key,
-        )
-
-        module_models: List[Tuple[Tuple[str, ...], List[DataModel]]] = []
-        unused_models: List[DataModel] = []
-        model_to_module_models: Dict[
-            DataModel, Tuple[Tuple[str, ...], List[DataModel]]
-        ] = {}
-        module_to_import: Dict[Tuple[str, ...], Imports] = {}
-
-        previous_module = ()  # type: Tuple[str, ...]
-        for module, models in ((k, [*v]) for k, v in grouped_models):  # type: Tuple[str, ...], List[DataModel]
-            for model in models:
-                model_to_module_models[model] = module, models
-            self.__delete_duplicate_models(models)
-            self.__replace_duplicate_name_in_module(models)
-            if len(previous_module) - len(module) > 1:
-                for parts in range(len(previous_module) - 1, len(module), -1):
-                    module_models.append(
-                        (
-                            previous_module[:parts],
-                            [],
-                        )
-                    )
-            module_models.append(
-                (
-                    module,
-                    models,
-                )
-            )
-            previous_module = module
-
-        class Processed(NamedTuple):
-            module: Tuple[str, ...]
-            models: List[DataModel]
-            init: bool
-            imports: Imports
-            scoped_model_resolver: ModelResolver
-
-        processed_models: List[Processed] = []
-
-        for module, models in module_models:
-            imports = module_to_import[module] = Imports(self.use_exact_imports)
-            init = False
-            if module:
-                parent = (*module[:-1], '__init__.py')
-                if parent not in results:
-                    results[parent] = Result(body='')
-                if (*module, '__init__.py') in results:
-                    module = (*module, '__init__.py')
-                    init = True
-                else:
-                    module = (*module[:-1], f'{module[-1]}.py')
-                    module = tuple(part.replace('-', '_') for part in module)
-            else:
-                module = ('__init__.py',)
-
-            scoped_model_resolver = ModelResolver()
-
-            self.__override_required_field(models)
-            self.__replace_unique_list_to_set(models)
-            self.__change_from_import(models, imports, scoped_model_resolver, init)
-            self.__extract_inherited_enum(models)
-            self.__set_reference_default_value_to_field(models)
-            self.__reuse_model(models, require_update_action_models)
-            self.__collapse_root_models(
-                models, unused_models, imports, scoped_model_resolver
-            )
-            self.__set_default_enum_member(models)
-            self.__sort_models(models, imports)
-            self.__apply_discriminator_type(models, imports)
-            self.__set_one_literal_on_default(models)
-
-            processed_models.append(
-                Processed(module, models, init, imports, scoped_model_resolver)
-            )
-
-        for processed_model in processed_models:
-            for model in processed_model.models:
-                processed_model.imports.append(model.imports)
-
-        for unused_model in unused_models:
-            module, models = model_to_module_models[unused_model]
-            if unused_model in models:  # pragma: no cover
-                imports = module_to_import[module]
-                imports.remove(unused_model.imports)
-                models.remove(unused_model)
-
-        for processed_model in processed_models:
-            # postprocess imports to remove unused imports.
-            model_code = str('\n'.join([str(m) for m in processed_model.models]))
-            unused_imports = [
-                (from_, import_)
-                for from_, imports_ in processed_model.imports.items()
-                for import_ in imports_
-                if import_ not in model_code
-            ]
-            for from_, import_ in unused_imports:
-                processed_model.imports.remove(Import(from_=from_, import_=import_))
-
-        for module, models, init, imports, scoped_model_resolver in processed_models:
-            # process after removing unused models
-            self.__change_imported_model_name(models, imports, scoped_model_resolver)
-
-        for module, models, init, imports, scoped_model_resolver in processed_models:
-            result: List[str] = []
-            if models:
-                if with_import:
-                    result += [str(self.imports), str(imports), '\n']
-
-                code = dump_templates(models)
-                result += [code]
-
-                if self.dump_resolve_reference_action is not None:
-                    result += [
-                        '\n',
-                        self.dump_resolve_reference_action(
-                            m.reference.short_name
-                            for m in models
-                            if m.path in require_update_action_models
-                        ),
-                    ]
-            if not result and not init:
-                continue
-            body = '\n'.join(result)
-            if code_formatter:
-                body = code_formatter.format_code(body)
-
-            results[module] = Result(
-                body=body, source=models[0].file_path if models else None
-            )
-
-        # retain existing behaviour
-        if [*results] == [('__init__.py',)]:
-            return results[('__init__.py',)].body
-
-        results = {tuple(i.replace('-', '_') for i in k): v for k, v in results.items()}
-        results = (
-            self.__postprocess_result_modules(results)
-            if self.treat_dots_as_module
-            else {
-                tuple(
-                    (
-                        part[: part.rfind('.')].replace('.', '_')
-                        + part[part.rfind('.') :]
-                    )
-                    for part in k
-                ): v
-                for k, v in results.items()
-            }
-        )
-
-        return results
diff -pruN 0.26.4-3/datamodel_code_generator/parser/graphql.py 0.34.0-1/datamodel_code_generator/parser/graphql.py
--- 0.26.4-3/datamodel_code_generator/parser/graphql.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/parser/graphql.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,538 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
-)
-from urllib.parse import ParseResult
-
-from datamodel_code_generator import (
-    DefaultPutDict,
-    LiteralType,
-    PythonVersion,
-    snooper_to_methods,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model.enum import Enum
-from datamodel_code_generator.model.scalar import DataTypeScalar
-from datamodel_code_generator.model.union import DataTypeUnion
-from datamodel_code_generator.parser.base import (
-    DataType,
-    Parser,
-    Source,
-    escape_characters,
-)
-from datamodel_code_generator.reference import ModelType, Reference
-from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
-
-try:
-    import graphql
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
-    )
-
-from datamodel_code_generator.format import DatetimeClassType
-
-graphql_resolver = graphql.type.introspection.TypeResolvers()
-
-
-def build_graphql_schema(schema_str: str) -> graphql.GraphQLSchema:
-    """Build a graphql schema from a string."""
-    schema = graphql.build_schema(schema_str)
-    return graphql.lexicographic_sort_schema(schema)
-
-
-@snooper_to_methods(max_variable_length=None)
-class GraphQLParser(Parser):
-    # raw graphql schema as `graphql-core` object
-    raw_obj: graphql.GraphQLSchema
-    # all processed graphql objects
-    # mapper from an object name (unique) to an object
-    all_graphql_objects: Dict[str, graphql.GraphQLNamedType]
-    # a reference for each object
-    # mapper from an object name to his reference
-    references: Dict[str, Reference] = {}
-    # mapper from graphql type to all objects with this type
-    # `graphql.type.introspection.TypeKind` -- an enum with all supported types
-    # `graphql.GraphQLNamedType` -- base type for each graphql object
-    # see `graphql-core` for more details
-    support_graphql_types: Dict[
-        graphql.type.introspection.TypeKind, List[graphql.GraphQLNamedType]
-    ]
-    # graphql types order for render
-    # may be as a parameter in the future
-    parse_order: List[graphql.type.introspection.TypeKind] = [
-        graphql.type.introspection.TypeKind.SCALAR,
-        graphql.type.introspection.TypeKind.ENUM,
-        graphql.type.introspection.TypeKind.INTERFACE,
-        graphql.type.introspection.TypeKind.OBJECT,
-        graphql.type.introspection.TypeKind.INPUT_OBJECT,
-        graphql.type.introspection.TypeKind.UNION,
-    ]
-
-    def __init__(
-        self,
-        source: Union[str, Path, ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_model_scalar_type: Type[DataModel] = DataTypeScalar,
-        data_model_union_type: Type[DataModel] = DataTypeUnion,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        use_one_literal_as_default: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-
-        self.data_model_scalar_type = data_model_scalar_type
-        self.data_model_union_type = data_model_union_type
-        self.use_standard_collections = use_standard_collections
-        self.use_union_operator = use_union_operator
-
-    def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
-        # TODO (denisart): Temporarily this method duplicates
-        # the method `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
-
-        if isinstance(self.source, list) or (  # pragma: no cover
-            isinstance(self.source, Path) and self.source.is_dir()
-        ):  # pragma: no cover
-            self.current_source_path = Path()
-            self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
-            }
-
-        for source in self.iter_source:
-            if isinstance(self.source, ParseResult):  # pragma: no cover
-                path_parts = self.get_url_path_parts(self.source)
-            else:
-                path_parts = list(source.path.parts)
-            if self.current_source_path is not None:  # pragma: no cover
-                self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                yield source, path_parts
-
-    def _resolve_types(self, paths: List[str], schema: graphql.GraphQLSchema) -> None:
-        for type_name, type_ in schema.type_map.items():
-            if type_name.startswith('__'):
-                continue
-
-            if type_name in ['Query', 'Mutation']:
-                continue
-
-            resolved_type = graphql_resolver.kind(type_, None)
-
-            if resolved_type in self.support_graphql_types:  # pragma: no cover
-                self.all_graphql_objects[type_.name] = type_
-                # TODO: need a special method for each graph type
-                self.references[type_.name] = Reference(
-                    path=f'{str(*paths)}/{resolved_type.value}/{type_.name}',
-                    name=type_.name,
-                    original_name=type_.name,
-                )
-
-                self.support_graphql_types[resolved_type].append(type_)
-
-    def _typename_field(self, name: str) -> DataModelFieldBase:
-        return self.data_model_field_type(
-            name='typename__',
-            data_type=DataType(
-                literals=[name],
-                use_union_operator=self.use_union_operator,
-                use_standard_collections=self.use_standard_collections,
-            ),
-            default=name,
-            use_annotated=self.use_annotated,
-            required=False,
-            alias='__typename',
-            use_one_literal_as_default=True,
-            has_default=True,
-        )
-
-    def _get_default(
-        self,
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
-        final_data_type: DataType,
-        required: bool,
-    ) -> Any:
-        if isinstance(field, graphql.GraphQLInputField):  # pragma: no cover
-            if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
-                return None
-            return field.default_value
-        if required is False:
-            if final_data_type.is_list:
-                return None
-
-        return None
-
-    def parse_scalar(self, scalar_graphql_object: graphql.GraphQLScalarType) -> None:
-        self.results.append(
-            self.data_model_scalar_type(
-                reference=self.references[scalar_graphql_object.name],
-                fields=[],
-                custom_template_dir=self.custom_template_dir,
-                extra_template_data=self.extra_template_data,
-                description=scalar_graphql_object.description,
-            )
-        )
-
-    def parse_enum(self, enum_object: graphql.GraphQLEnumType) -> None:
-        enum_fields: List[DataModelFieldBase] = []
-        exclude_field_names: Set[str] = set()
-
-        for value_name, value in enum_object.values.items():
-            default = (
-                f"'{value_name.translate(escape_characters)}'"
-                if isinstance(value_name, str)
-                else value_name
-            )
-
-            field_name = self.model_resolver.get_valid_field_name(
-                value_name, excludes=exclude_field_names, model_type=ModelType.ENUM
-            )
-            exclude_field_names.add(field_name)
-
-            enum_fields.append(
-                self.data_model_field_type(
-                    name=field_name,
-                    data_type=self.data_type_manager.get_data_type(
-                        Types.string,
-                    ),
-                    default=default,
-                    required=True,
-                    strip_default_none=self.strip_default_none,
-                    has_default=True,
-                    use_field_description=value.description is not None,
-                    original_name=None,
-                )
-            )
-
-        enum = Enum(
-            reference=self.references[enum_object.name],
-            fields=enum_fields,
-            path=self.current_source_path,
-            description=enum_object.description,
-            custom_template_dir=self.custom_template_dir,
-        )
-        self.results.append(enum)
-
-    def parse_field(
-        self,
-        field_name: str,
-        alias: str,
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
-    ) -> DataModelFieldBase:
-        final_data_type = DataType(
-            is_optional=True,
-            use_union_operator=self.use_union_operator,
-            use_standard_collections=self.use_standard_collections,
-        )
-        data_type = final_data_type
-        obj = field.type
-
-        while graphql.is_list_type(obj) or graphql.is_non_null_type(obj):
-            if graphql.is_list_type(obj):
-                data_type.is_list = True
-
-                new_data_type = DataType(
-                    is_optional=True,
-                    use_union_operator=self.use_union_operator,
-                    use_standard_collections=self.use_standard_collections,
-                )
-                data_type.data_types = [new_data_type]
-
-                data_type = new_data_type
-            elif graphql.is_non_null_type(obj):  # pragma: no cover
-                data_type.is_optional = False
-
-            obj = obj.of_type
-
-        data_type.type = obj.name
-
-        required = (not self.force_optional_for_required_fields) and (
-            not final_data_type.is_optional
-        )
-
-        default = self._get_default(field, final_data_type, required)
-        extras = (
-            {}
-            if self.default_field_extras is None
-            else self.default_field_extras.copy()
-        )
-
-        if field.description is not None:  # pragma: no cover
-            extras['description'] = field.description
-
-        return self.data_model_field_type(
-            name=field_name,
-            default=default,
-            data_type=final_data_type,
-            required=required,
-            extras=extras,
-            alias=alias,
-            strip_default_none=self.strip_default_none,
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            use_default_kwarg=self.use_default_kwarg,
-            original_name=field_name,
-            has_default=default is not None,
-        )
-
-    def parse_object_like(
-        self,
-        obj: Union[
-            graphql.GraphQLInterfaceType,
-            graphql.GraphQLObjectType,
-            graphql.GraphQLInputObjectType,
-        ],
-    ) -> None:
-        fields = []
-        exclude_field_names: Set[str] = set()
-
-        for field_name, field in obj.fields.items():
-            field_name_, alias = self.model_resolver.get_valid_field_name_and_alias(
-                field_name, excludes=exclude_field_names
-            )
-            exclude_field_names.add(field_name_)
-
-            data_model_field_type = self.parse_field(field_name_, alias, field)
-            fields.append(data_model_field_type)
-
-        fields.append(self._typename_field(obj.name))
-
-        base_classes = []
-        if hasattr(obj, 'interfaces'):  # pragma: no cover
-            base_classes = [self.references[i.name] for i in obj.interfaces]
-
-        data_model_type = self.data_model_type(
-            reference=self.references[obj.name],
-            fields=fields,
-            base_classes=base_classes,
-            custom_base_class=self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-
-    def parse_interface(
-        self, interface_graphql_object: graphql.GraphQLInterfaceType
-    ) -> None:
-        self.parse_object_like(interface_graphql_object)
-
-    def parse_object(self, graphql_object: graphql.GraphQLObjectType) -> None:
-        self.parse_object_like(graphql_object)
-
-    def parse_input_object(
-        self, input_graphql_object: graphql.GraphQLInputObjectType
-    ) -> None:
-        self.parse_object_like(input_graphql_object)  # pragma: no cover
-
-    def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
-        fields = []
-
-        for type_ in union_object.types:
-            fields.append(
-                self.data_model_field_type(name=type_.name, data_type=DataType())
-            )
-
-        data_model_type = self.data_model_union_type(
-            reference=self.references[union_object.name],
-            fields=fields,
-            custom_base_class=self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=union_object.description,
-        )
-        self.results.append(data_model_type)
-
-    def parse_raw(self) -> None:
-        self.all_graphql_objects = {}
-        self.references: Dict[str, Reference] = {}
-
-        self.support_graphql_types = {
-            graphql.type.introspection.TypeKind.SCALAR: [],
-            graphql.type.introspection.TypeKind.ENUM: [],
-            graphql.type.introspection.TypeKind.UNION: [],
-            graphql.type.introspection.TypeKind.INTERFACE: [],
-            graphql.type.introspection.TypeKind.OBJECT: [],
-            graphql.type.introspection.TypeKind.INPUT_OBJECT: [],
-        }
-
-        # may be as a parameter in the future (??)
-        _mapper_from_graphql_type_to_parser_method = {
-            graphql.type.introspection.TypeKind.SCALAR: self.parse_scalar,
-            graphql.type.introspection.TypeKind.ENUM: self.parse_enum,
-            graphql.type.introspection.TypeKind.INTERFACE: self.parse_interface,
-            graphql.type.introspection.TypeKind.OBJECT: self.parse_object,
-            graphql.type.introspection.TypeKind.INPUT_OBJECT: self.parse_input_object,
-            graphql.type.introspection.TypeKind.UNION: self.parse_union,
-        }
-
-        for source, path_parts in self._get_context_source_path_parts():
-            schema: graphql.GraphQLSchema = build_graphql_schema(source.text)
-            self.raw_obj = schema
-
-            self._resolve_types(path_parts, schema)
-
-            for next_type in self.parse_order:
-                for obj in self.support_graphql_types[next_type]:
-                    parser_ = _mapper_from_graphql_type_to_parser_method[next_type]
-                    parser_(obj)  # type: ignore
diff -pruN 0.26.4-3/datamodel_code_generator/parser/jsonschema.py 0.34.0-1/datamodel_code_generator/parser/jsonschema.py
--- 0.26.4-3/datamodel_code_generator/parser/jsonschema.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/parser/jsonschema.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,1846 +0,0 @@
-from __future__ import annotations
-
-import enum as _enum
-from collections import defaultdict
-from contextlib import contextmanager
-from functools import lru_cache
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Generator,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
-)
-from urllib.parse import ParseResult
-from warnings import warn
-
-from pydantic import (
-    Field,
-)
-
-from datamodel_code_generator import (
-    InvalidClassNameError,
-    load_yaml,
-    load_yaml_from_path,
-    snooper_to_methods,
-)
-from datamodel_code_generator.format import PythonVersion
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model.base import UNDEFINED, get_module_name
-from datamodel_code_generator.model.enum import Enum
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.parser.base import (
-    SPECIAL_PATH_FORMAT,
-    Parser,
-    Source,
-    escape_characters,
-    get_special_path,
-    title_to_class_name,
-)
-from datamodel_code_generator.reference import ModelType, Reference, is_url
-from datamodel_code_generator.types import (
-    DataType,
-    DataTypeManager,
-    EmptyDataType,
-    StrictTypes,
-    Types,
-    UnionIntFloat,
-)
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    BaseModel,
-    cached_property,
-    field_validator,
-    model_validator,
-)
-
-if PYDANTIC_V2:
-    from pydantic import ConfigDict
-
-from datamodel_code_generator.format import DatetimeClassType
-
-
-def get_model_by_path(
-    schema: Union[Dict[str, Any], List[Any]], keys: Union[List[str], List[int]]
-) -> Dict[Any, Any]:
-    model: Union[Dict[Any, Any], List[Any]]
-    if not keys:
-        model = schema
-    elif len(keys) == 1:
-        if isinstance(schema, dict):
-            model = schema.get(keys[0], {})  # type: ignore
-        else:  # pragma: no cover
-            model = schema[int(keys[0])]
-    elif isinstance(schema, dict):
-        model = get_model_by_path(schema[keys[0]], keys[1:])  # type: ignore
-    else:
-        model = get_model_by_path(schema[int(keys[0])], keys[1:])
-    if isinstance(model, dict):
-        return model
-    raise NotImplementedError(  # pragma: no cover
-        f'Does not support json pointer to array. schema={schema}, key={keys}'
-    )
-
-
-json_schema_data_formats: Dict[str, Dict[str, Types]] = {
-    'integer': {
-        'int32': Types.int32,
-        'int64': Types.int64,
-        'default': Types.integer,
-        'date-time': Types.date_time,
-        'unix-time': Types.int64,
-    },
-    'number': {
-        'float': Types.float,
-        'double': Types.double,
-        'decimal': Types.decimal,
-        'date-time': Types.date_time,
-        'time': Types.time,
-        'default': Types.number,
-    },
-    'string': {
-        'default': Types.string,
-        'byte': Types.byte,  # base64 encoded string
-        'binary': Types.binary,
-        'date': Types.date,
-        'date-time': Types.date_time,
-        'duration': Types.timedelta,
-        'time': Types.time,
-        'password': Types.password,
-        'path': Types.path,
-        'email': Types.email,
-        'idn-email': Types.email,
-        'uuid': Types.uuid,
-        'uuid1': Types.uuid1,
-        'uuid2': Types.uuid2,
-        'uuid3': Types.uuid3,
-        'uuid4': Types.uuid4,
-        'uuid5': Types.uuid5,
-        'uri': Types.uri,
-        'uri-reference': Types.string,
-        'hostname': Types.hostname,
-        'ipv4': Types.ipv4,
-        'ipv4-network': Types.ipv4_network,
-        'ipv6': Types.ipv6,
-        'ipv6-network': Types.ipv6_network,
-        'decimal': Types.decimal,
-        'integer': Types.integer,
-    },
-    'boolean': {'default': Types.boolean},
-    'object': {'default': Types.object},
-    'null': {'default': Types.null},
-    'array': {'default': Types.array},
-}
-
-
-class JSONReference(_enum.Enum):
-    LOCAL = 'LOCAL'
-    REMOTE = 'REMOTE'
-    URL = 'URL'
-
-
-class Discriminator(BaseModel):
-    propertyName: str
-    mapping: Optional[Dict[str, str]] = None
-
-
-class JsonSchemaObject(BaseModel):
-    if not TYPE_CHECKING:
-        if PYDANTIC_V2:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.model_fields
-
-        else:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.__fields__
-
-            @classmethod
-            def model_rebuild(cls) -> None:
-                cls.update_forward_refs()
-
-    __constraint_fields__: Set[str] = {
-        'exclusiveMinimum',
-        'minimum',
-        'exclusiveMaximum',
-        'maximum',
-        'multipleOf',
-        'minItems',
-        'maxItems',
-        'minLength',
-        'maxLength',
-        'pattern',
-        'uniqueItems',
-    }
-    __extra_key__: str = SPECIAL_PATH_FORMAT.format('extras')
-
-    @model_validator(mode='before')
-    def validate_exclusive_maximum_and_exclusive_minimum(cls, values: Any) -> Any:
-        if not isinstance(values, dict):
-            return values
-        exclusive_maximum: Union[float, bool, None] = values.get('exclusiveMaximum')
-        exclusive_minimum: Union[float, bool, None] = values.get('exclusiveMinimum')
-
-        if exclusive_maximum is True:
-            values['exclusiveMaximum'] = values['maximum']
-            del values['maximum']
-        elif exclusive_maximum is False:
-            del values['exclusiveMaximum']
-        if exclusive_minimum is True:
-            values['exclusiveMinimum'] = values['minimum']
-            del values['minimum']
-        elif exclusive_minimum is False:
-            del values['exclusiveMinimum']
-        return values
-
-    @field_validator('ref')
-    def validate_ref(cls, value: Any) -> Any:
-        if isinstance(value, str) and '#' in value:
-            if value.endswith('#/'):
-                return value[:-1]
-            elif '#/' in value or value[0] == '#' or value[-1] == '#':
-                return value
-            return value.replace('#', '#/')
-        return value
-
-    items: Union[List[JsonSchemaObject], JsonSchemaObject, bool, None] = None
-    uniqueItems: Optional[bool] = None
-    type: Union[str, List[str], None] = None
-    format: Optional[str] = None
-    pattern: Optional[str] = None
-    minLength: Optional[int] = None
-    maxLength: Optional[int] = None
-    minimum: Optional[UnionIntFloat] = None
-    maximum: Optional[UnionIntFloat] = None
-    minItems: Optional[int] = None
-    maxItems: Optional[int] = None
-    multipleOf: Optional[float] = None
-    exclusiveMaximum: Union[float, bool, None] = None
-    exclusiveMinimum: Union[float, bool, None] = None
-    additionalProperties: Union[JsonSchemaObject, bool, None] = None
-    patternProperties: Optional[Dict[str, JsonSchemaObject]] = None
-    oneOf: List[JsonSchemaObject] = []
-    anyOf: List[JsonSchemaObject] = []
-    allOf: List[JsonSchemaObject] = []
-    enum: List[Any] = []
-    writeOnly: Optional[bool] = None
-    readOnly: Optional[bool] = None
-    properties: Optional[Dict[str, Union[JsonSchemaObject, bool]]] = None
-    required: List[str] = []
-    ref: Optional[str] = Field(default=None, alias='$ref')
-    nullable: Optional[bool] = False
-    x_enum_varnames: List[str] = Field(default=[], alias='x-enum-varnames')
-    description: Optional[str] = None
-    title: Optional[str] = None
-    example: Any = None
-    examples: Any = None
-    default: Any = None
-    id: Optional[str] = Field(default=None, alias='$id')
-    custom_type_path: Optional[str] = Field(default=None, alias='customTypePath')
-    custom_base_path: Optional[str] = Field(default=None, alias='customBasePath')
-    extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
-    discriminator: Union[Discriminator, str, None] = None
-    if PYDANTIC_V2:
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True,
-            ignored_types=(cached_property,),
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-            smart_casts = True
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **data: Any) -> None:
-            super().__init__(**data)
-            self.extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
-            if 'const' in data.get(self.__extra_key__, {}):
-                self.extras['const'] = data[self.__extra_key__]['const']
-
-    @cached_property
-    def is_object(self) -> bool:
-        return (
-            self.properties is not None
-            or self.type == 'object'
-            and not self.allOf
-            and not self.oneOf
-            and not self.anyOf
-            and not self.ref
-        )
-
-    @cached_property
-    def is_array(self) -> bool:
-        return self.items is not None or self.type == 'array'
-
-    @cached_property
-    def ref_object_name(self) -> str:  # pragma: no cover
-        return self.ref.rsplit('/', 1)[-1]  # type: ignore
-
-    @field_validator('items', mode='before')
-    def validate_items(cls, values: Any) -> Any:
-        # this condition expects empty dict
-        return values or None
-
-    @cached_property
-    def has_default(self) -> bool:
-        return 'default' in self.__fields_set__ or 'default_factory' in self.extras
-
-    @cached_property
-    def has_constraint(self) -> bool:
-        return bool(self.__constraint_fields__ & self.__fields_set__)
-
-    @cached_property
-    def ref_type(self) -> Optional[JSONReference]:
-        if self.ref:
-            return get_ref_type(self.ref)
-        return None  # pragma: no cover
-
-    @cached_property
-    def type_has_null(self) -> bool:
-        return isinstance(self.type, list) and 'null' in self.type
-
-
-@lru_cache()
-def get_ref_type(ref: str) -> JSONReference:
-    if ref[0] == '#':
-        return JSONReference.LOCAL
-    elif is_url(ref):
-        return JSONReference.URL
-    return JSONReference.REMOTE
-
-
-def _get_type(type_: str, format__: Optional[str] = None) -> Types:
-    if type_ not in json_schema_data_formats:
-        return Types.any
-    data_formats: Optional[Types] = json_schema_data_formats[type_].get(
-        'default' if format__ is None else format__
-    )
-    if data_formats is not None:
-        return data_formats
-
-    warn(f'format of {format__!r} not understood for {type_!r} - using default' '')
-    return json_schema_data_formats[type_]['default']
-
-
-JsonSchemaObject.model_rebuild()
-
-DEFAULT_FIELD_KEYS: Set[str] = {
-    'example',
-    'examples',
-    'description',
-    'discriminator',
-    'title',
-    'const',
-    'default_factory',
-}
-
-EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: Set[str] = {
-    'readOnly',
-    'writeOnly',
-}
-
-EXCLUDE_FIELD_KEYS = (
-    set(JsonSchemaObject.get_fields())
-    - DEFAULT_FIELD_KEYS
-    - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
-) | {
-    '$id',
-    '$ref',
-    JsonSchemaObject.__extra_key__,
-}
-
-
-@snooper_to_methods(max_variable_length=None)
-class JsonSchemaParser(Parser):
-    SCHEMA_PATHS: ClassVar[List[str]] = ['#/definitions', '#/$defs']
-    SCHEMA_OBJECT_TYPE: ClassVar[Type[JsonSchemaObject]] = JsonSchemaObject
-
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        use_one_literal_as_default: bool = False,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-
-        self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
-        self.raw_obj: Dict[Any, Any] = {}
-        self._root_id: Optional[str] = None
-        self._root_id_base_path: Optional[str] = None
-        self.reserved_refs: DefaultDict[Tuple[str], Set[str]] = defaultdict(set)
-        self.field_keys: Set[str] = {
-            *DEFAULT_FIELD_KEYS,
-            *self.field_extra_keys,
-            *self.field_extra_keys_without_x_prefix,
-        }
-
-        if self.data_model_field_type.can_have_extra_keys:
-            self.get_field_extra_key: Callable[[str], str] = (
-                lambda key: self.model_resolver.get_valid_field_name_and_alias(key)[0]
-            )
-
-        else:
-            self.get_field_extra_key = lambda key: key
-
-    def get_field_extras(self, obj: JsonSchemaObject) -> Dict[str, Any]:
-        if self.field_include_all_keys:
-            extras = {
-                self.get_field_extra_key(
-                    k.lstrip('x-') if k in self.field_extra_keys_without_x_prefix else k
-                ): v
-                for k, v in obj.extras.items()
-            }
-        else:
-            extras = {
-                self.get_field_extra_key(
-                    k.lstrip('x-') if k in self.field_extra_keys_without_x_prefix else k
-                ): v
-                for k, v in obj.extras.items()
-                if k in self.field_keys
-            }
-        if self.default_field_extras:
-            extras.update(self.default_field_extras)
-        return extras
-
-    @cached_property
-    def schema_paths(self) -> List[Tuple[str, List[str]]]:
-        return [(s, s.lstrip('#/').split('/')) for s in self.SCHEMA_PATHS]
-
-    @property
-    def root_id(self) -> Optional[str]:
-        return self.model_resolver.root_id
-
-    @root_id.setter
-    def root_id(self, value: Optional[str]) -> None:
-        self.model_resolver.set_root_id(value)
-
-    def should_parse_enum_as_literal(self, obj: JsonSchemaObject) -> bool:
-        return self.enum_field_as_literal == LiteralType.All or (
-            self.enum_field_as_literal == LiteralType.One and len(obj.enum) == 1
-        )
-
-    def is_constraints_field(self, obj: JsonSchemaObject) -> bool:
-        return obj.is_array or (
-            self.field_constraints
-            and not (
-                obj.ref
-                or obj.anyOf
-                or obj.oneOf
-                or obj.allOf
-                or obj.is_object
-                or obj.enum
-            )
-        )
-
-    def get_object_field(
-        self,
-        *,
-        field_name: Optional[str],
-        field: JsonSchemaObject,
-        required: bool,
-        field_type: DataType,
-        alias: Optional[str],
-        original_field_name: Optional[str],
-    ) -> DataModelFieldBase:
-        return self.data_model_field_type(
-            name=field_name,
-            default=field.default,
-            data_type=field_type,
-            required=required,
-            alias=alias,
-            constraints=field.dict() if self.is_constraints_field(field) else None,
-            nullable=field.nullable
-            if self.strict_nullable and (field.has_default or required)
-            else None,
-            strip_default_none=self.strip_default_none,
-            extras=self.get_field_extras(field),
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            use_default_kwarg=self.use_default_kwarg,
-            original_name=original_field_name,
-            has_default=field.has_default,
-            type_has_null=field.type_has_null,
-        )
-
-    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-        if obj.type is None:
-            if 'const' in obj.extras:
-                return self.data_type_manager.get_data_type_from_value(
-                    obj.extras['const']
-                )
-            return self.data_type_manager.get_data_type(
-                Types.any,
-            )
-
-        def _get_data_type(type_: str, format__: str) -> DataType:
-            return self.data_type_manager.get_data_type(
-                _get_type(type_, format__),
-                **obj.dict() if not self.field_constraints else {},
-            )
-
-        if isinstance(obj.type, list):
-            return self.data_type(
-                data_types=[
-                    _get_data_type(t, obj.format or 'default')
-                    for t in obj.type
-                    if t != 'null'
-                ],
-                is_optional='null' in obj.type,
-            )
-        return _get_data_type(obj.type, obj.format or 'default')
-
-    def get_ref_data_type(self, ref: str) -> DataType:
-        reference = self.model_resolver.add_ref(ref)
-        return self.data_type(reference=reference)
-
-    def set_additional_properties(self, name: str, obj: JsonSchemaObject) -> None:
-        if isinstance(obj.additionalProperties, bool):
-            self.extra_template_data[name]['additionalProperties'] = (
-                obj.additionalProperties
-            )
-
-    def set_title(self, name: str, obj: JsonSchemaObject) -> None:
-        if obj.title:
-            self.extra_template_data[name]['title'] = obj.title
-
-    def _deep_merge(
-        self, dict1: Dict[Any, Any], dict2: Dict[Any, Any]
-    ) -> Dict[Any, Any]:
-        result = dict1.copy()
-        for key, value in dict2.items():
-            if key in result:
-                if isinstance(result[key], dict) and isinstance(value, dict):
-                    result[key] = self._deep_merge(result[key], value)
-                    continue
-                elif isinstance(result[key], list) and isinstance(value, list):
-                    result[key] = result[key] + value
-                    continue
-            result[key] = value
-        return result
-
-    def parse_combined_schema(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        target_attribute_name: str,
-    ) -> List[DataType]:
-        base_object = obj.dict(
-            exclude={target_attribute_name}, exclude_unset=True, by_alias=True
-        )
-        combined_schemas: List[JsonSchemaObject] = []
-        refs = []
-        for index, target_attribute in enumerate(
-            getattr(obj, target_attribute_name, [])
-        ):
-            if target_attribute.ref:
-                combined_schemas.append(target_attribute)
-                refs.append(index)
-                # TODO: support partial ref
-                # {
-                #   "type": "integer",
-                #   "oneOf": [
-                #     { "minimum": 5 },
-                #     { "$ref": "#/definitions/positive" }
-                #   ],
-                #    "definitions": {
-                #     "positive": {
-                #       "minimum": 0,
-                #       "exclusiveMinimum": true
-                #     }
-                #    }
-                # }
-            else:
-                combined_schemas.append(
-                    self.SCHEMA_OBJECT_TYPE.parse_obj(
-                        self._deep_merge(
-                            base_object,
-                            target_attribute.dict(exclude_unset=True, by_alias=True),
-                        )
-                    )
-                )
-
-        parsed_schemas = self.parse_list_item(
-            name,
-            combined_schemas,
-            path,
-            obj,
-            singular_name=False,
-        )
-        common_path_keyword = f'{target_attribute_name}Common'
-        return [
-            self._parse_object_common_part(
-                name,
-                obj,
-                [*get_special_path(common_path_keyword, path), str(i)],
-                ignore_duplicate_model=True,
-                fields=[],
-                base_classes=[d.reference],
-                required=[],
-            )
-            if i in refs and d.reference
-            else d
-            for i, d in enumerate(parsed_schemas)
-        ]
-
-    def parse_any_of(
-        self, name: str, obj: JsonSchemaObject, path: List[str]
-    ) -> List[DataType]:
-        return self.parse_combined_schema(name, obj, path, 'anyOf')
-
-    def parse_one_of(
-        self, name: str, obj: JsonSchemaObject, path: List[str]
-    ) -> List[DataType]:
-        return self.parse_combined_schema(name, obj, path, 'oneOf')
-
-    def _parse_object_common_part(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        ignore_duplicate_model: bool,
-        fields: List[DataModelFieldBase],
-        base_classes: List[Reference],
-        required: List[str],
-    ) -> DataType:
-        if obj.properties:
-            fields.extend(
-                self.parse_object_fields(obj, path, get_module_name(name, None))
-            )
-        # ignore an undetected object
-        if ignore_duplicate_model and not fields and len(base_classes) == 1:
-            with self.model_resolver.current_base_path_context(
-                self.model_resolver._base_path
-            ):
-                self.model_resolver.delete(path)
-                return self.data_type(reference=base_classes[0])
-        if required:
-            for field in fields:
-                if self.force_optional_for_required_fields or (  # pragma: no cover
-                    self.apply_default_values_for_required_fields and field.has_default
-                ):
-                    continue  # pragma: no cover
-                if (field.original_name or field.name) in required:
-                    field.required = True
-        if obj.required:
-            field_name_to_field = {f.original_name or f.name: f for f in fields}
-            for required_ in obj.required:
-                if required_ in field_name_to_field:
-                    field = field_name_to_field[required_]
-                    if self.force_optional_for_required_fields or (
-                        self.apply_default_values_for_required_fields
-                        and field.has_default
-                    ):
-                        continue
-                    field.required = True
-                else:
-                    fields.append(
-                        self.data_model_field_type(
-                            required=True, original_name=required_, data_type=DataType()
-                        )
-                    )
-        if self.use_title_as_name and obj.title:  # pragma: no cover
-            name = obj.title
-        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
-        self.set_additional_properties(reference.name, obj)
-        data_model_type = self.data_model_type(
-            reference=reference,
-            fields=fields,
-            base_classes=base_classes,
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-
-        return self.data_type(reference=reference)
-
-    def _parse_all_of_item(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        fields: List[DataModelFieldBase],
-        base_classes: List[Reference],
-        required: List[str],
-        union_models: List[Reference],
-    ) -> None:
-        for all_of_item in obj.allOf:
-            if all_of_item.ref:  # $ref
-                base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
-            else:
-                module_name = get_module_name(name, None)
-                object_fields = self.parse_object_fields(
-                    all_of_item,
-                    path,
-                    module_name,
-                )
-
-                if object_fields:
-                    fields.extend(object_fields)
-                else:
-                    if all_of_item.required:
-                        required.extend(all_of_item.required)
-                self._parse_all_of_item(
-                    name,
-                    all_of_item,
-                    path,
-                    fields,
-                    base_classes,
-                    required,
-                    union_models,
-                )
-                if all_of_item.anyOf:
-                    self.model_resolver.add(path, name, class_name=True, loaded=True)
-                    union_models.extend(
-                        d.reference
-                        for d in self.parse_any_of(name, all_of_item, path)
-                        if d.reference
-                    )
-                if all_of_item.oneOf:
-                    self.model_resolver.add(path, name, class_name=True, loaded=True)
-                    union_models.extend(
-                        d.reference
-                        for d in self.parse_one_of(name, all_of_item, path)
-                        if d.reference
-                    )
-
-    def parse_all_of(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        ignore_duplicate_model: bool = False,
-    ) -> DataType:
-        if len(obj.allOf) == 1 and not obj.properties:
-            single_obj = obj.allOf[0]
-            if single_obj.ref and single_obj.ref_type == JSONReference.LOCAL:
-                if get_model_by_path(self.raw_obj, single_obj.ref[2:].split('/')).get(
-                    'enum'
-                ):
-                    return self.get_ref_data_type(single_obj.ref)
-        fields: List[DataModelFieldBase] = []
-        base_classes: List[Reference] = []
-        required: List[str] = []
-        union_models: List[Reference] = []
-        self._parse_all_of_item(
-            name, obj, path, fields, base_classes, required, union_models
-        )
-        if not union_models:
-            return self._parse_object_common_part(
-                name, obj, path, ignore_duplicate_model, fields, base_classes, required
-            )
-        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
-        all_of_data_type = self._parse_object_common_part(
-            name,
-            obj,
-            get_special_path('allOf', path),
-            ignore_duplicate_model,
-            fields,
-            base_classes,
-            required,
-        )
-        data_type = self.data_type(
-            data_types=[
-                self._parse_object_common_part(
-                    name,
-                    obj,
-                    get_special_path(f'union_model-{index}', path),
-                    ignore_duplicate_model,
-                    [],
-                    [union_model, all_of_data_type.reference],  # type: ignore
-                    [],
-                )
-                for index, union_model in enumerate(union_models)
-            ]
-        )
-        field = self.get_object_field(
-            field_name=None,
-            field=obj,
-            required=True,
-            field_type=data_type,
-            alias=None,
-            original_field_name=None,
-        )
-        data_model_root = self.data_model_root_type(
-            reference=reference,
-            fields=[field],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root)
-        return self.data_type(reference=reference)
-
-    def parse_object_fields(
-        self, obj: JsonSchemaObject, path: List[str], module_name: Optional[str] = None
-    ) -> List[DataModelFieldBase]:
-        properties: Dict[str, Union[JsonSchemaObject, bool]] = (
-            {} if obj.properties is None else obj.properties
-        )
-        requires: Set[str] = {*()} if obj.required is None else {*obj.required}
-        fields: List[DataModelFieldBase] = []
-
-        exclude_field_names: Set[str] = set()
-        for original_field_name, field in properties.items():
-            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
-                original_field_name, exclude_field_names
-            )
-            modular_name = f'{module_name}.{field_name}' if module_name else field_name
-
-            exclude_field_names.add(field_name)
-
-            if isinstance(field, bool):
-                fields.append(
-                    self.data_model_field_type(
-                        name=field_name,
-                        data_type=self.data_type_manager.get_data_type(
-                            Types.any,
-                        ),
-                        required=False
-                        if self.force_optional_for_required_fields
-                        else original_field_name in requires,
-                        alias=alias,
-                        strip_default_none=self.strip_default_none,
-                        use_annotated=self.use_annotated,
-                        use_field_description=self.use_field_description,
-                        original_name=original_field_name,
-                    )
-                )
-                continue
-
-            field_type = self.parse_item(modular_name, field, [*path, field_name])
-
-            if self.force_optional_for_required_fields or (
-                self.apply_default_values_for_required_fields and field.has_default
-            ):
-                required: bool = False
-            else:
-                required = original_field_name in requires
-            fields.append(
-                self.get_object_field(
-                    field_name=field_name,
-                    field=field,
-                    required=required,
-                    field_type=field_type,
-                    alias=alias,
-                    original_field_name=original_field_name,
-                )
-            )
-        return fields
-
-    def parse_object(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        unique: bool = True,
-    ) -> DataType:
-        if not unique:  # pragma: no cover
-            warn(
-                f'{self.__class__.__name__}.parse_object() ignore `unique` argument.'
-                f'An object name must be unique.'
-                f'This argument will be removed in a future version'
-            )
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(
-            path,
-            name,
-            class_name=True,
-            singular_name=singular_name,
-            loaded=True,
-        )
-        class_name = reference.name
-        self.set_title(class_name, obj)
-        fields = self.parse_object_fields(obj, path, get_module_name(class_name, None))
-        if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
-            data_model_type_class = self.data_model_type
-        else:
-            fields.append(
-                self.get_object_field(
-                    field_name=None,
-                    field=obj.additionalProperties,
-                    required=False,
-                    original_field_name=None,
-                    field_type=self.data_type(
-                        data_types=[
-                            self.parse_item(
-                                # TODO: Improve naming for nested ClassName
-                                name,
-                                obj.additionalProperties,
-                                [*path, 'additionalProperties'],
-                            )
-                        ],
-                        is_dict=True,
-                    ),
-                    alias=None,
-                )
-            )
-            data_model_type_class = self.data_model_root_type
-
-        self.set_additional_properties(class_name, obj)
-        data_model_type = data_model_type_class(
-            reference=reference,
-            fields=fields,
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-        return self.data_type(reference=reference)
-
-    def parse_pattern_properties(
-        self,
-        name: str,
-        pattern_properties: Dict[str, JsonSchemaObject],
-        path: List[str],
-    ) -> DataType:
-        return self.data_type(
-            data_types=[
-                self.data_type(
-                    data_types=[
-                        self.parse_item(
-                            name,
-                            kv[1],
-                            get_special_path(f'patternProperties/{i}', path),
-                        )
-                    ],
-                    is_dict=True,
-                    dict_key=self.data_type_manager.get_data_type(
-                        Types.string,
-                        pattern=kv[0] if not self.field_constraints else None,
-                    ),
-                )
-                for i, kv in enumerate(pattern_properties.items())
-            ],
-        )
-
-    def parse_item(
-        self,
-        name: str,
-        item: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        parent: Optional[JsonSchemaObject] = None,
-    ) -> DataType:
-        if self.use_title_as_name and item.title:
-            name = item.title
-            singular_name = False
-        if (
-            parent
-            and not item.enum
-            and item.has_constraint
-            and (parent.has_constraint or self.field_constraints)
-        ):
-            root_type_path = get_special_path('array', path)
-            return self.parse_root_type(
-                self.model_resolver.add(
-                    root_type_path,
-                    name,
-                    class_name=True,
-                    singular_name=singular_name,
-                ).name,
-                item,
-                root_type_path,
-            )
-        elif item.ref:
-            return self.get_ref_data_type(item.ref)
-        elif item.custom_type_path:
-            return self.data_type_manager.get_data_type_from_full_path(
-                item.custom_type_path, is_custom_type=True
-            )
-        elif item.is_array:
-            return self.parse_array_fields(
-                name, item, get_special_path('array', path)
-            ).data_type
-        elif (
-            item.discriminator
-            and parent
-            and parent.is_array
-            and (item.oneOf or item.anyOf)
-        ):
-            return self.parse_root_type(name, item, path)
-        elif item.anyOf:
-            return self.data_type(
-                data_types=self.parse_any_of(
-                    name, item, get_special_path('anyOf', path)
-                )
-            )
-        elif item.oneOf:
-            return self.data_type(
-                data_types=self.parse_one_of(
-                    name, item, get_special_path('oneOf', path)
-                )
-            )
-        elif item.allOf:
-            all_of_path = get_special_path('allOf', path)
-            all_of_path = [self.model_resolver.resolve_ref(all_of_path)]
-            return self.parse_all_of(
-                self.model_resolver.add(
-                    all_of_path, name, singular_name=singular_name, class_name=True
-                ).name,
-                item,
-                all_of_path,
-                ignore_duplicate_model=True,
-            )
-        elif item.is_object or item.patternProperties:
-            object_path = get_special_path('object', path)
-            if item.properties:
-                return self.parse_object(
-                    name, item, object_path, singular_name=singular_name
-                )
-            elif item.patternProperties:
-                # support only single key dict.
-                return self.parse_pattern_properties(
-                    name, item.patternProperties, object_path
-                )
-            elif isinstance(item.additionalProperties, JsonSchemaObject):
-                return self.data_type(
-                    data_types=[
-                        self.parse_item(name, item.additionalProperties, object_path)
-                    ],
-                    is_dict=True,
-                )
-            return self.data_type_manager.get_data_type(
-                Types.object,
-            )
-        elif item.enum:
-            if self.should_parse_enum_as_literal(item):
-                return self.parse_enum_as_literal(item)
-            return self.parse_enum(
-                name, item, get_special_path('enum', path), singular_name=singular_name
-            )
-        return self.get_data_type(item)
-
-    def parse_list_item(
-        self,
-        name: str,
-        target_items: List[JsonSchemaObject],
-        path: List[str],
-        parent: JsonSchemaObject,
-        singular_name: bool = True,
-    ) -> List[DataType]:
-        return [
-            self.parse_item(
-                name,
-                item,
-                [*path, str(index)],
-                singular_name=singular_name,
-                parent=parent,
-            )
-            for index, item in enumerate(target_items)
-        ]
-
-    def parse_array_fields(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = True,
-    ) -> DataModelFieldBase:
-        if self.force_optional_for_required_fields:
-            required: bool = False
-            nullable: Optional[bool] = None
-        else:
-            required = not (
-                obj.has_default and self.apply_default_values_for_required_fields
-            )
-            if self.strict_nullable:
-                nullable = obj.nullable if obj.has_default or required else True
-            else:
-                required = not obj.nullable and required
-                nullable = None
-        if isinstance(obj.items, JsonSchemaObject):
-            items: List[JsonSchemaObject] = [obj.items]
-        elif isinstance(obj.items, list):
-            items = obj.items
-        else:
-            items = []
-
-        data_types: List[DataType] = [
-            self.data_type(
-                data_types=self.parse_list_item(
-                    name,
-                    items,
-                    path,
-                    obj,
-                    singular_name=singular_name,
-                ),
-                is_list=True,
-            )
-        ]
-        # TODO: decide special path word for a combined data model.
-        if obj.allOf:
-            data_types.append(
-                self.parse_all_of(name, obj, get_special_path('allOf', path))
-            )
-        elif obj.is_object:
-            data_types.append(
-                self.parse_object(name, obj, get_special_path('object', path))
-            )
-        if obj.enum:
-            data_types.append(
-                self.parse_enum(name, obj, get_special_path('enum', path))
-            )
-        return self.data_model_field_type(
-            data_type=self.data_type(data_types=data_types),
-            default=obj.default,
-            required=required,
-            constraints=obj.dict(),
-            nullable=nullable,
-            strip_default_none=self.strip_default_none,
-            extras=self.get_field_extras(obj),
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            original_name=None,
-            has_default=obj.has_default,
-        )
-
-    def parse_array(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        original_name: Optional[str] = None,
-    ) -> DataType:
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
-        field = self.parse_array_fields(original_name or name, obj, [*path, name])
-
-        if reference in [
-            d.reference for d in field.data_type.all_data_types if d.reference
-        ]:
-            # self-reference
-            field = self.data_model_field_type(
-                data_type=self.data_type(
-                    data_types=[
-                        self.data_type(
-                            data_types=field.data_type.data_types[1:], is_list=True
-                        ),
-                        *field.data_type.data_types[1:],
-                    ]
-                ),
-                default=field.default,
-                required=field.required,
-                constraints=field.constraints,
-                nullable=field.nullable,
-                strip_default_none=field.strip_default_none,
-                extras=field.extras,
-                use_annotated=self.use_annotated,
-                use_field_description=self.use_field_description,
-                original_name=None,
-                has_default=field.has_default,
-            )
-
-        data_model_root = self.data_model_root_type(
-            reference=reference,
-            fields=[field],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root)
-        return self.data_type(reference=reference)
-
-    def parse_root_type(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> DataType:
-        reference: Optional[Reference] = None
-        if obj.ref:
-            data_type: DataType = self.get_ref_data_type(obj.ref)
-        elif obj.custom_type_path:
-            data_type = self.data_type_manager.get_data_type_from_full_path(
-                obj.custom_type_path, is_custom_type=True
-            )  # pragma: no cover
-        elif obj.is_array:
-            data_type = self.parse_array_fields(
-                name, obj, get_special_path('array', path)
-            ).data_type  # pragma: no cover
-        elif obj.anyOf or obj.oneOf:
-            reference = self.model_resolver.add(
-                path, name, loaded=True, class_name=True
-            )
-            if obj.anyOf:
-                data_types: List[DataType] = self.parse_any_of(
-                    name, obj, get_special_path('anyOf', path)
-                )
-            else:
-                data_types = self.parse_one_of(
-                    name, obj, get_special_path('oneOf', path)
-                )
-
-            if len(data_types) > 1:  # pragma: no cover
-                data_type = self.data_type(data_types=data_types)
-            elif not data_types:  # pragma: no cover
-                return EmptyDataType()
-            else:  # pragma: no cover
-                data_type = data_types[0]
-        elif obj.patternProperties:
-            data_type = self.parse_pattern_properties(name, obj.patternProperties, path)
-        elif obj.enum:
-            if self.should_parse_enum_as_literal(obj):
-                data_type = self.parse_enum_as_literal(obj)
-            else:  # pragma: no cover
-                data_type = self.parse_enum(name, obj, path)
-        elif obj.type:
-            data_type = self.get_data_type(obj)
-        else:
-            data_type = self.data_type_manager.get_data_type(
-                Types.any,
-            )
-        if self.force_optional_for_required_fields:
-            required: bool = False
-        else:
-            required = not obj.nullable and not (
-                obj.has_default and self.apply_default_values_for_required_fields
-            )
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        if not reference:
-            reference = self.model_resolver.add(
-                path, name, loaded=True, class_name=True
-            )
-        self.set_title(name, obj)
-        self.set_additional_properties(name, obj)
-        data_model_root_type = self.data_model_root_type(
-            reference=reference,
-            fields=[
-                self.data_model_field_type(
-                    data_type=data_type,
-                    default=obj.default,
-                    required=required,
-                    constraints=obj.dict() if self.field_constraints else {},
-                    nullable=obj.nullable if self.strict_nullable else None,
-                    strip_default_none=self.strip_default_none,
-                    extras=self.get_field_extras(obj),
-                    use_annotated=self.use_annotated,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                    has_default=obj.has_default,
-                )
-            ],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root_type)
-        return self.data_type(reference=reference)
-
-    def parse_enum_as_literal(self, obj: JsonSchemaObject) -> DataType:
-        return self.data_type(literals=[i for i in obj.enum if i is not None])
-
-    def parse_enum(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        unique: bool = True,
-    ) -> DataType:
-        if not unique:  # pragma: no cover
-            warn(
-                f'{self.__class__.__name__}.parse_enum() ignore `unique` argument.'
-                f'An object name must be unique.'
-                f'This argument will be removed in a future version'
-            )
-        enum_fields: List[DataModelFieldBase] = []
-
-        if None in obj.enum and obj.type == 'string':
-            # Nullable is valid in only OpenAPI
-            nullable: bool = True
-            enum_times = [e for e in obj.enum if e is not None]
-        else:
-            enum_times = obj.enum
-            nullable = False
-
-        exclude_field_names: Set[str] = set()
-
-        for i, enum_part in enumerate(enum_times):
-            if obj.type == 'string' or isinstance(enum_part, str):
-                default = (
-                    f"'{enum_part.translate(escape_characters)}'"
-                    if isinstance(enum_part, str)
-                    else enum_part
-                )
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
-                else:
-                    field_name = str(enum_part)
-            else:
-                default = enum_part
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
-                else:
-                    prefix = (
-                        obj.type
-                        if isinstance(obj.type, str)
-                        else type(enum_part).__name__
-                    )
-                    field_name = f'{prefix}_{enum_part}'
-            field_name = self.model_resolver.get_valid_field_name(
-                field_name, excludes=exclude_field_names, model_type=ModelType.ENUM
-            )
-            exclude_field_names.add(field_name)
-            enum_fields.append(
-                self.data_model_field_type(
-                    name=field_name,
-                    default=default,
-                    data_type=self.data_type_manager.get_data_type(
-                        Types.any,
-                    ),
-                    required=True,
-                    strip_default_none=self.strip_default_none,
-                    has_default=obj.has_default,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                )
-            )
-
-        def create_enum(reference_: Reference) -> DataType:
-            enum = Enum(
-                reference=reference_,
-                fields=enum_fields,
-                path=self.current_source_path,
-                description=obj.description if self.use_schema_description else None,
-                custom_template_dir=self.custom_template_dir,
-                type_=_get_type(obj.type, obj.format)
-                if self.use_subclass_enum and isinstance(obj.type, str)
-                else None,
-                default=obj.default if obj.has_default else UNDEFINED,
-            )
-            self.results.append(enum)
-            return self.data_type(reference=reference_)
-
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(
-            path,
-            name,
-            class_name=True,
-            singular_name=singular_name,
-            singular_name_suffix='Enum',
-            loaded=True,
-        )
-
-        if not nullable:
-            return create_enum(reference)
-
-        enum_reference = self.model_resolver.add(
-            [*path, 'Enum'],
-            f'{reference.name}Enum',
-            class_name=True,
-            singular_name=singular_name,
-            singular_name_suffix='Enum',
-            loaded=True,
-        )
-
-        data_model_root_type = self.data_model_root_type(
-            reference=reference,
-            fields=[
-                self.data_model_field_type(
-                    data_type=create_enum(enum_reference),
-                    default=obj.default,
-                    required=False,
-                    nullable=True,
-                    strip_default_none=self.strip_default_none,
-                    extras=self.get_field_extras(obj),
-                    use_annotated=self.use_annotated,
-                    has_default=obj.has_default,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                )
-            ],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            default=obj.default if obj.has_default else UNDEFINED,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root_type)
-        return self.data_type(reference=reference)
-
-    def _get_ref_body(self, resolved_ref: str) -> Dict[Any, Any]:
-        if is_url(resolved_ref):
-            return self._get_ref_body_from_url(resolved_ref)
-        return self._get_ref_body_from_remote(resolved_ref)
-
-    def _get_ref_body_from_url(self, ref: str) -> Dict[Any, Any]:
-        # URL Reference – $ref: 'http://path/to/your/resource' Uses the whole document located on the different server.
-        return self.remote_object_cache.get_or_put(
-            ref, default_factory=lambda key: load_yaml(self._get_text_from_url(key))
-        )
-
-    def _get_ref_body_from_remote(self, resolved_ref: str) -> Dict[Any, Any]:
-        # Remote Reference – $ref: 'document.json' Uses the whole document located on the same server and in
-        # the same location. TODO treat edge case
-        full_path = self.base_path / resolved_ref
-
-        return self.remote_object_cache.get_or_put(
-            str(full_path),
-            default_factory=lambda _: load_yaml_from_path(full_path, self.encoding),
-        )
-
-    def resolve_ref(self, object_ref: str) -> Reference:
-        reference = self.model_resolver.add_ref(object_ref)
-        if reference.loaded:
-            return reference
-
-        # https://swagger.io/docs/specification/using-ref/
-        ref = self.model_resolver.resolve_ref(object_ref)
-        if get_ref_type(object_ref) == JSONReference.LOCAL:
-            # Local Reference – $ref: '#/definitions/myElement'
-            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)  # type: ignore
-            return reference
-        elif self.model_resolver.is_after_load(ref):
-            self.reserved_refs[tuple(ref.split('#')[0].split('/'))].add(ref)  # type: ignore
-            return reference
-
-        if is_url(ref):
-            relative_path, object_path = ref.split('#')
-            relative_paths = [relative_path]
-            base_path = None
-        else:
-            if self.model_resolver.is_external_root_ref(ref):
-                relative_path, object_path = ref[:-1], ''
-            else:
-                relative_path, object_path = ref.split('#')
-            relative_paths = relative_path.split('/')
-            base_path = Path(*relative_paths).parent
-        with self.model_resolver.current_base_path_context(
-            base_path
-        ), self.model_resolver.base_url_context(relative_path):
-            self._parse_file(
-                self._get_ref_body(relative_path),
-                self.model_resolver.add_ref(ref, resolved=True).name,
-                relative_paths,
-                object_path.split('/') if object_path else None,
-            )
-        reference.loaded = True
-        return reference
-
-    def parse_ref(self, obj: JsonSchemaObject, path: List[str]) -> None:
-        if obj.ref:
-            self.resolve_ref(obj.ref)
-        if obj.items:
-            if isinstance(obj.items, JsonSchemaObject):
-                self.parse_ref(obj.items, path)
-            else:
-                if isinstance(obj.items, list):
-                    for item in obj.items:
-                        self.parse_ref(item, path)
-        if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_ref(obj.additionalProperties, path)
-        if obj.patternProperties:
-            for value in obj.patternProperties.values():
-                self.parse_ref(value, path)
-        for item in obj.anyOf:
-            self.parse_ref(item, path)
-        for item in obj.allOf:
-            self.parse_ref(item, path)
-        for item in obj.oneOf:
-            self.parse_ref(item, path)
-        if obj.properties:
-            for property_value in obj.properties.values():
-                if isinstance(property_value, JsonSchemaObject):
-                    self.parse_ref(property_value, path)
-
-    def parse_id(self, obj: JsonSchemaObject, path: List[str]) -> None:
-        if obj.id:
-            self.model_resolver.add_id(obj.id, path)
-        if obj.items:
-            if isinstance(obj.items, JsonSchemaObject):
-                self.parse_id(obj.items, path)
-            else:
-                if isinstance(obj.items, list):
-                    for item in obj.items:
-                        self.parse_id(item, path)
-        if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_id(obj.additionalProperties, path)
-        if obj.patternProperties:
-            for value in obj.patternProperties.values():
-                self.parse_id(value, path)
-        for item in obj.anyOf:
-            self.parse_id(item, path)
-        for item in obj.allOf:
-            self.parse_id(item, path)
-        if obj.properties:
-            for property_value in obj.properties.values():
-                if isinstance(property_value, JsonSchemaObject):
-                    self.parse_id(property_value, path)
-
-    @contextmanager
-    def root_id_context(self, root_raw: Dict[str, Any]) -> Generator[None, None, None]:
-        root_id: Optional[str] = root_raw.get('$id')
-        previous_root_id: Optional[str] = self.root_id
-        self.root_id = root_id if root_id else None
-        yield
-        self.root_id = previous_root_id
-
-    def parse_raw_obj(
-        self,
-        name: str,
-        raw: Dict[str, Any],
-        path: List[str],
-    ) -> None:
-        self.parse_obj(name, self.SCHEMA_OBJECT_TYPE.parse_obj(raw), path)
-
-    def parse_obj(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> None:
-        if obj.is_array:
-            self.parse_array(name, obj, path)
-        elif obj.allOf:
-            self.parse_all_of(name, obj, path)
-        elif obj.oneOf or obj.anyOf:
-            data_type = self.parse_root_type(name, obj, path)
-            if isinstance(data_type, EmptyDataType) and obj.properties:
-                self.parse_object(name, obj, path)  # pragma: no cover
-        elif obj.properties:
-            self.parse_object(name, obj, path)
-        elif obj.patternProperties:
-            self.parse_root_type(name, obj, path)
-        elif obj.type == 'object':
-            self.parse_object(name, obj, path)
-        elif obj.enum and not self.should_parse_enum_as_literal(obj):
-            self.parse_enum(name, obj, path)
-        else:
-            self.parse_root_type(name, obj, path)
-        self.parse_ref(obj, path)
-
-    def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
-        if isinstance(self.source, list) or (
-            isinstance(self.source, Path) and self.source.is_dir()
-        ):
-            self.current_source_path = Path()
-            self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
-            }
-
-        for source in self.iter_source:
-            if isinstance(self.source, ParseResult):
-                path_parts = self.get_url_path_parts(self.source)
-            else:
-                path_parts = list(source.path.parts)
-            if self.current_source_path is not None:
-                self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                yield source, path_parts
-
-    def parse_raw(self) -> None:
-        for source, path_parts in self._get_context_source_path_parts():
-            self.raw_obj = load_yaml(source.text)
-            if self.raw_obj is None:  # pragma: no cover
-                warn(f'{source.path} is empty. Skipping this file')
-                continue
-            if self.custom_class_name_generator:
-                obj_name = self.raw_obj.get('title', 'Model')
-            else:
-                if self.class_name:
-                    obj_name = self.class_name
-                else:
-                    # backward compatible
-                    obj_name = self.raw_obj.get('title', 'Model')
-                    if not self.model_resolver.validate_name(obj_name):
-                        obj_name = title_to_class_name(obj_name)
-                if not self.model_resolver.validate_name(obj_name):
-                    raise InvalidClassNameError(obj_name)
-            self._parse_file(self.raw_obj, obj_name, path_parts)
-
-        self._resolve_unparsed_json_pointer()
-
-    def _resolve_unparsed_json_pointer(self) -> None:
-        model_count: int = len(self.results)
-        for source in self.iter_source:
-            path_parts = list(source.path.parts)
-            reserved_refs = self.reserved_refs.get(tuple(path_parts))  # type: ignore
-            if not reserved_refs:
-                continue
-            if self.current_source_path is not None:
-                self.current_source_path = source.path
-
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                for reserved_ref in sorted(reserved_refs):
-                    if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
-                        continue
-                    # for root model
-                    self.raw_obj = load_yaml(source.text)
-                    self.parse_json_pointer(self.raw_obj, reserved_ref, path_parts)
-
-        if model_count != len(self.results):
-            # New model have been generated. It try to resolve json pointer again.
-            self._resolve_unparsed_json_pointer()
-
-    def parse_json_pointer(
-        self, raw: Dict[str, Any], ref: str, path_parts: List[str]
-    ) -> None:
-        path = ref.split('#', 1)[-1]
-        if path[0] == '/':  # pragma: no cover
-            path = path[1:]
-        object_paths = path.split('/')
-        models = get_model_by_path(raw, object_paths)
-        model_name = object_paths[-1]
-
-        self.parse_raw_obj(
-            model_name, models, [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        )
-
-    def _parse_file(
-        self,
-        raw: Dict[str, Any],
-        obj_name: str,
-        path_parts: List[str],
-        object_paths: Optional[List[str]] = None,
-    ) -> None:
-        object_paths = [o for o in object_paths or [] if o]
-        if object_paths:
-            path = [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        else:
-            path = path_parts
-        with self.model_resolver.current_root_context(path_parts):
-            obj_name = self.model_resolver.add(
-                path, obj_name, unique=False, class_name=True
-            ).name
-            with self.root_id_context(raw):
-                # Some jsonschema docs include attribute self to have include version details
-                raw.pop('self', None)
-                # parse $id before parsing $ref
-                root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
-                self.parse_id(root_obj, path_parts)
-                definitions: Optional[Dict[Any, Any]] = None
-                for schema_path, split_schema_path in self.schema_paths:
-                    try:
-                        definitions = get_model_by_path(raw, split_schema_path)
-                        if definitions:
-                            break
-                    except KeyError:
-                        continue
-                if definitions is None:
-                    definitions = {}
-
-                for key, model in definitions.items():
-                    obj = self.SCHEMA_OBJECT_TYPE.parse_obj(model)
-                    self.parse_id(obj, [*path_parts, schema_path, key])
-
-                if object_paths:
-                    models = get_model_by_path(raw, object_paths)
-                    model_name = object_paths[-1]
-                    self.parse_obj(
-                        model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path
-                    )
-                else:
-                    self.parse_obj(obj_name, root_obj, path_parts or ['#'])
-                for key, model in definitions.items():
-                    path = [*path_parts, schema_path, key]
-                    reference = self.model_resolver.get(path)
-                    if not reference or not reference.loaded:
-                        self.parse_raw_obj(key, model, path)
-
-                key = tuple(path_parts)
-                reserved_refs = set(self.reserved_refs.get(key) or [])
-                while reserved_refs:
-                    for reserved_path in sorted(reserved_refs):
-                        reference = self.model_resolver.get(reserved_path)
-                        if not reference or reference.loaded:
-                            continue
-                        object_paths = reserved_path.split('#/', 1)[-1].split('/')
-                        path = reserved_path.split('/')
-                        models = get_model_by_path(raw, object_paths)
-                        model_name = object_paths[-1]
-                        self.parse_obj(
-                            model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path
-                        )
-                    previous_reserved_refs = reserved_refs
-                    reserved_refs = set(self.reserved_refs.get(key) or [])
-                    if previous_reserved_refs == reserved_refs:
-                        break
diff -pruN 0.26.4-3/datamodel_code_generator/parser/openapi.py 0.34.0-1/datamodel_code_generator/parser/openapi.py
--- 0.26.4-3/datamodel_code_generator/parser/openapi.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/parser/openapi.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,648 +0,0 @@
-from __future__ import annotations
-
-import re
-from collections import defaultdict
-from enum import Enum
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Iterable,
-    List,
-    Mapping,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-from warnings import warn
-
-from pydantic import Field
-
-from datamodel_code_generator import (
-    DefaultPutDict,
-    Error,
-    LiteralType,
-    OpenAPIScope,
-    PythonVersion,
-    load_yaml,
-    snooper_to_methods,
-)
-from datamodel_code_generator.format import DatetimeClassType
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.parser.base import get_special_path
-from datamodel_code_generator.parser.jsonschema import (
-    JsonSchemaObject,
-    JsonSchemaParser,
-    get_model_by_path,
-)
-from datamodel_code_generator.reference import snake_to_upper_camel
-from datamodel_code_generator.types import (
-    DataType,
-    DataTypeManager,
-    EmptyDataType,
-    StrictTypes,
-)
-from datamodel_code_generator.util import BaseModel
-
-RE_APPLICATION_JSON_PATTERN: Pattern[str] = re.compile(r'^application/.*json$')
-
-OPERATION_NAMES: List[str] = [
-    'get',
-    'put',
-    'post',
-    'delete',
-    'patch',
-    'head',
-    'options',
-    'trace',
-]
-
-
-class ParameterLocation(Enum):
-    query = 'query'
-    header = 'header'
-    path = 'path'
-    cookie = 'cookie'
-
-
-BaseModelT = TypeVar('BaseModelT', bound=BaseModel)
-
-
-class ReferenceObject(BaseModel):
-    ref: str = Field(..., alias='$ref')
-
-
-class ExampleObject(BaseModel):
-    summary: Optional[str] = None
-    description: Optional[str] = None
-    value: Any = None
-    externalValue: Optional[str] = None
-
-
-class MediaObject(BaseModel):
-    schema_: Union[ReferenceObject, JsonSchemaObject, None] = Field(
-        None, alias='schema'
-    )
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-
-
-class ParameterObject(BaseModel):
-    name: Optional[str] = None
-    in_: Optional[ParameterLocation] = Field(None, alias='in')
-    description: Optional[str] = None
-    required: bool = False
-    deprecated: bool = False
-    schema_: Optional[JsonSchemaObject] = Field(None, alias='schema')
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-    content: Dict[str, MediaObject] = {}
-
-
-class HeaderObject(BaseModel):
-    description: Optional[str] = None
-    required: bool = False
-    deprecated: bool = False
-    schema_: Optional[JsonSchemaObject] = Field(None, alias='schema')
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-    content: Dict[str, MediaObject] = {}
-
-
-class RequestBodyObject(BaseModel):
-    description: Optional[str] = None
-    content: Dict[str, MediaObject] = {}
-    required: bool = False
-
-
-class ResponseObject(BaseModel):
-    description: Optional[str] = None
-    headers: Dict[str, ParameterObject] = {}
-    content: Dict[Union[str, int], MediaObject] = {}
-
-
-class Operation(BaseModel):
-    tags: List[str] = []
-    summary: Optional[str] = None
-    description: Optional[str] = None
-    operationId: Optional[str] = None
-    parameters: List[Union[ReferenceObject, ParameterObject]] = []
-    requestBody: Union[ReferenceObject, RequestBodyObject, None] = None
-    responses: Dict[Union[str, int], Union[ReferenceObject, ResponseObject]] = {}
-    deprecated: bool = False
-
-
-class ComponentsObject(BaseModel):
-    schemas: Dict[str, Union[ReferenceObject, JsonSchemaObject]] = {}
-    responses: Dict[str, Union[ReferenceObject, ResponseObject]] = {}
-    examples: Dict[str, Union[ReferenceObject, ExampleObject]] = {}
-    requestBodies: Dict[str, Union[ReferenceObject, RequestBodyObject]] = {}
-    headers: Dict[str, Union[ReferenceObject, HeaderObject]] = {}
-
-
-@snooper_to_methods(max_variable_length=None)
-class OpenAPIParser(JsonSchemaParser):
-    SCHEMA_PATHS: ClassVar[List[str]] = ['#/components/schemas']
-
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        allow_extra_fields: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        use_one_literal_as_default: bool = False,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        openapi_scopes: Optional[List[OpenAPIScope]] = None,
-        wrap_string_literal: Optional[bool] = False,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ):
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-        self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
-            OpenAPIScope.Schemas
-        ]
-
-    def get_ref_model(self, ref: str) -> Dict[str, Any]:
-        ref_file, ref_path = self.model_resolver.resolve_ref(ref).split('#', 1)
-        if ref_file:
-            ref_body = self._get_ref_body(ref_file)
-        else:  # pragma: no cover
-            ref_body = self.raw_obj
-        return get_model_by_path(ref_body, ref_path.split('/')[1:])
-
-    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-        # OpenAPI 3.0 doesn't allow `null` in the `type` field and list of types
-        # https://swagger.io/docs/specification/data-models/data-types/#null
-        # OpenAPI 3.1 does allow `null` in the `type` field and is equivalent to
-        # a `nullable` flag on the property itself
-        if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
-            obj.type = [obj.type, 'null']
-
-        return super().get_data_type(obj)
-
-    def resolve_object(
-        self, obj: Union[ReferenceObject, BaseModelT], object_type: Type[BaseModelT]
-    ) -> BaseModelT:
-        if isinstance(obj, ReferenceObject):
-            ref_obj = self.get_ref_model(obj.ref)
-            return object_type.parse_obj(ref_obj)
-        return obj
-
-    def parse_schema(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> DataType:
-        if obj.is_array:
-            data_type = self.parse_array(name, obj, [*path, name])
-        elif obj.allOf:  # pragma: no cover
-            data_type = self.parse_all_of(name, obj, path)
-        elif obj.oneOf or obj.anyOf:  # pragma: no cover
-            data_type = self.parse_root_type(name, obj, path)
-            if isinstance(data_type, EmptyDataType) and obj.properties:
-                self.parse_object(name, obj, path)
-        elif obj.is_object:
-            data_type = self.parse_object(name, obj, path)
-        elif obj.enum:  # pragma: no cover
-            data_type = self.parse_enum(name, obj, path)
-        elif obj.ref:  # pragma: no cover
-            data_type = self.get_ref_data_type(obj.ref)
-        else:
-            data_type = self.get_data_type(obj)
-        self.parse_ref(obj, path)
-        return data_type
-
-    def parse_request_body(
-        self,
-        name: str,
-        request_body: RequestBodyObject,
-        path: List[str],
-    ) -> None:
-        for (
-            media_type,
-            media_obj,
-        ) in request_body.content.items():  # type: str, MediaObject
-            if isinstance(media_obj.schema_, JsonSchemaObject):
-                self.parse_schema(name, media_obj.schema_, [*path, media_type])
-
-    def parse_responses(
-        self,
-        name: str,
-        responses: Dict[Union[str, int], Union[ReferenceObject, ResponseObject]],
-        path: List[str],
-    ) -> Dict[Union[str, int], Dict[str, DataType]]:
-        data_types: DefaultDict[Union[str, int], Dict[str, DataType]] = defaultdict(
-            dict
-        )
-        for status_code, detail in responses.items():
-            if isinstance(detail, ReferenceObject):
-                if not detail.ref:  # pragma: no cover
-                    continue
-                ref_model = self.get_ref_model(detail.ref)
-                content = {
-                    k: MediaObject.parse_obj(v)
-                    for k, v in ref_model.get('content', {}).items()
-                }
-            else:
-                content = detail.content
-
-            if self.allow_responses_without_content and not content:
-                data_types[status_code]['application/json'] = DataType(type='None')
-
-            for content_type, obj in content.items():
-                object_schema = obj.schema_
-                if not object_schema:  # pragma: no cover
-                    continue
-                if isinstance(object_schema, JsonSchemaObject):
-                    data_types[status_code][content_type] = self.parse_schema(
-                        name, object_schema, [*path, str(status_code), content_type]
-                    )
-                else:
-                    data_types[status_code][content_type] = self.get_ref_data_type(
-                        object_schema.ref
-                    )
-
-        return data_types
-
-    @classmethod
-    def parse_tags(
-        cls,
-        name: str,
-        tags: List[str],
-        path: List[str],
-    ) -> List[str]:
-        return tags
-
-    @classmethod
-    def _get_model_name(cls, path_name: str, method: str, suffix: str) -> str:
-        camel_path_name = snake_to_upper_camel(path_name.replace('/', '_'))
-        return f'{camel_path_name}{method.capitalize()}{suffix}'
-
-    def parse_all_parameters(
-        self,
-        name: str,
-        parameters: List[Union[ReferenceObject, ParameterObject]],
-        path: List[str],
-    ) -> None:
-        fields: List[DataModelFieldBase] = []
-        exclude_field_names: Set[str] = set()
-        reference = self.model_resolver.add(path, name, class_name=True, unique=True)
-        for parameter in parameters:
-            parameter = self.resolve_object(parameter, ParameterObject)
-            parameter_name = parameter.name
-            if not parameter_name or parameter.in_ != ParameterLocation.query:
-                continue
-            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
-                field_name=parameter_name, excludes=exclude_field_names
-            )
-            if parameter.schema_:
-                fields.append(
-                    self.get_object_field(
-                        field_name=field_name,
-                        field=parameter.schema_,
-                        field_type=self.parse_item(
-                            field_name, parameter.schema_, [*path, name, parameter_name]
-                        ),
-                        original_field_name=parameter_name,
-                        required=parameter.required,
-                        alias=alias,
-                    )
-                )
-            else:
-                data_types: List[DataType] = []
-                object_schema: Optional[JsonSchemaObject] = None
-                for (
-                    media_type,
-                    media_obj,
-                ) in parameter.content.items():
-                    if not media_obj.schema_:
-                        continue
-                    object_schema = self.resolve_object(
-                        media_obj.schema_, JsonSchemaObject
-                    )
-                    data_types.append(
-                        self.parse_item(
-                            field_name,
-                            object_schema,
-                            [*path, name, parameter_name, media_type],
-                        )
-                    )
-
-                if not data_types:
-                    continue
-                if len(data_types) == 1:
-                    data_type = data_types[0]
-                else:
-                    data_type = self.data_type(data_types=data_types)
-                    # multiple data_type parse as non-constraints field
-                    object_schema = None
-                fields.append(
-                    self.data_model_field_type(
-                        name=field_name,
-                        default=object_schema.default if object_schema else None,
-                        data_type=data_type,
-                        required=parameter.required,
-                        alias=alias,
-                        constraints=object_schema.dict()
-                        if object_schema and self.is_constraints_field(object_schema)
-                        else None,
-                        nullable=object_schema.nullable
-                        if object_schema
-                        and self.strict_nullable
-                        and (object_schema.has_default or parameter.required)
-                        else None,
-                        strip_default_none=self.strip_default_none,
-                        extras=self.get_field_extras(object_schema)
-                        if object_schema
-                        else {},
-                        use_annotated=self.use_annotated,
-                        use_field_description=self.use_field_description,
-                        use_default_kwarg=self.use_default_kwarg,
-                        original_name=parameter_name,
-                        has_default=object_schema.has_default
-                        if object_schema
-                        else False,
-                        type_has_null=object_schema.type_has_null
-                        if object_schema
-                        else None,
-                    )
-                )
-
-        if OpenAPIScope.Parameters in self.open_api_scopes and fields:
-            self.results.append(
-                self.data_model_type(
-                    fields=fields,
-                    reference=reference,
-                    custom_base_class=self.base_class,
-                    custom_template_dir=self.custom_template_dir,
-                    keyword_only=self.keyword_only,
-                )
-            )
-
-    def parse_operation(
-        self,
-        raw_operation: Dict[str, Any],
-        path: List[str],
-    ) -> None:
-        operation = Operation.parse_obj(raw_operation)
-        path_name, method = path[-2:]
-        if self.use_operation_id_as_name:
-            if not operation.operationId:
-                raise Error(
-                    f'All operations must have an operationId when --use_operation_id_as_name is set.'
-                    f'The following path was missing an operationId: {path_name}'
-                )
-            path_name = operation.operationId
-            method = ''
-        self.parse_all_parameters(
-            self._get_model_name(path_name, method, suffix='ParametersQuery'),
-            operation.parameters,
-            [*path, 'parameters'],
-        )
-        if operation.requestBody:
-            if isinstance(operation.requestBody, ReferenceObject):
-                ref_model = self.get_ref_model(operation.requestBody.ref)
-                request_body = RequestBodyObject.parse_obj(ref_model)
-            else:
-                request_body = operation.requestBody
-            self.parse_request_body(
-                name=self._get_model_name(path_name, method, suffix='Request'),
-                request_body=request_body,
-                path=[*path, 'requestBody'],
-            )
-        self.parse_responses(
-            name=self._get_model_name(path_name, method, suffix='Response'),
-            responses=operation.responses,
-            path=[*path, 'responses'],
-        )
-        if OpenAPIScope.Tags in self.open_api_scopes:
-            self.parse_tags(
-                name=self._get_model_name(path_name, method, suffix='Tags'),
-                tags=operation.tags,
-                path=[*path, 'tags'],
-            )
-
-    def parse_raw(self) -> None:
-        for source, path_parts in self._get_context_source_path_parts():
-            if self.validation:
-                warn(
-                    'Deprecated: `--validation` option is deprecated. the option will be removed in a future '
-                    'release. please use another tool to validate OpenAPI.\n'
-                )
-
-                try:
-                    from prance import BaseParser
-
-                    BaseParser(
-                        spec_string=source.text,
-                        backend='openapi-spec-validator',
-                        encoding=self.encoding,
-                    )
-                except ImportError:  # pragma: no cover
-                    warn(
-                        'Warning: Validation was skipped for OpenAPI. `prance` or `openapi-spec-validator` are not '
-                        'installed.\n'
-                        'To use --validation option after datamodel-code-generator 0.24.0, Please run `$pip install '
-                        "'datamodel-code-generator[validation]'`.\n"
-                    )
-
-            specification: Dict[str, Any] = load_yaml(source.text)
-            self.raw_obj = specification
-            schemas: Dict[Any, Any] = specification.get('components', {}).get(
-                'schemas', {}
-            )
-            security: Optional[List[Dict[str, List[str]]]] = specification.get(
-                'security'
-            )
-            if OpenAPIScope.Schemas in self.open_api_scopes:
-                for (
-                    obj_name,
-                    raw_obj,
-                ) in schemas.items():  # type: str, Dict[Any, Any]
-                    self.parse_raw_obj(
-                        obj_name,
-                        raw_obj,
-                        [*path_parts, '#/components', 'schemas', obj_name],
-                    )
-            if OpenAPIScope.Paths in self.open_api_scopes:
-                paths: Dict[str, Dict[str, Any]] = specification.get('paths', {})
-                parameters: List[Dict[str, Any]] = [
-                    self._get_ref_body(p['$ref']) if '$ref' in p else p
-                    for p in paths.get('parameters', [])
-                    if isinstance(p, dict)
-                ]
-                paths_path = [*path_parts, '#/paths']
-                for path_name, methods in paths.items():
-                    # Resolve path items if applicable
-                    if '$ref' in methods:
-                        methods = self.get_ref_model(methods['$ref'])
-                    paths_parameters = parameters[:]
-                    if 'parameters' in methods:
-                        paths_parameters.extend(methods['parameters'])
-                    relative_path_name = path_name[1:]
-                    if relative_path_name:
-                        path = [*paths_path, relative_path_name]
-                    else:  # pragma: no cover
-                        path = get_special_path('root', paths_path)
-                    for operation_name, raw_operation in methods.items():
-                        if operation_name not in OPERATION_NAMES:
-                            continue
-                        if paths_parameters:
-                            if 'parameters' in raw_operation:  # pragma: no cover
-                                raw_operation['parameters'].extend(paths_parameters)
-                            else:
-                                raw_operation['parameters'] = paths_parameters
-                        if security is not None and 'security' not in raw_operation:
-                            raw_operation['security'] = security
-                        self.parse_operation(
-                            raw_operation,
-                            [*path, operation_name],
-                        )
-
-        self._resolve_unparsed_json_pointer()
diff -pruN 0.26.4-3/datamodel_code_generator/pydantic_patch.py 0.34.0-1/datamodel_code_generator/pydantic_patch.py
--- 0.26.4-3/datamodel_code_generator/pydantic_patch.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/pydantic_patch.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,22 +0,0 @@
-import sys
-
-import pydantic.typing
-
-
-def patched_evaluate_forwardref(
-    forward_ref, globalns, localns=None
-):  # pragma: no cover
-    try:
-        return forward_ref._evaluate(
-            globalns, localns or None, set()
-        )  # pragma: no cover
-    except TypeError:
-        # Fallback for Python 3.12 compatibility
-        return forward_ref._evaluate(
-            globalns, localns or None, set(), recursive_guard=set()
-        )
-
-
-# Patch only Python3.12
-if sys.version_info >= (3, 12):
-    pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref
diff -pruN 0.26.4-3/datamodel_code_generator/reference.py 0.34.0-1/datamodel_code_generator/reference.py
--- 0.26.4-3/datamodel_code_generator/reference.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/reference.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,775 +0,0 @@
-import re
-from collections import defaultdict
-from contextlib import contextmanager
-from enum import Enum, auto
-from functools import lru_cache
-from itertools import zip_longest
-from keyword import iskeyword
-from pathlib import Path, PurePath
-from typing import (
-    TYPE_CHECKING,
-    AbstractSet,
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Generator,
-    List,
-    Mapping,
-    NamedTuple,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult, urlparse
-
-import inflect
-import pydantic
-from packaging import version
-from pydantic import BaseModel
-
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    cached_property,
-    model_validator,
-)
-
-if TYPE_CHECKING:
-    from pydantic.typing import DictStrAny
-
-
-class _BaseModel(BaseModel):
-    _exclude_fields: ClassVar[Set[str]] = set()
-    _pass_fields: ClassVar[Set[str]] = set()
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **values: Any) -> None:
-            super().__init__(**values)
-            for pass_field_name in self._pass_fields:
-                if pass_field_name in values:
-                    setattr(self, pass_field_name, values[pass_field_name])
-
-    if not TYPE_CHECKING:
-        if PYDANTIC_V2:
-
-            def dict(
-                self,
-                *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                by_alias: bool = False,
-                exclude_unset: bool = False,
-                exclude_defaults: bool = False,
-                exclude_none: bool = False,
-            ) -> 'DictStrAny':
-                return self.model_dump(
-                    include=include,
-                    exclude=set(exclude or ()) | self._exclude_fields,
-                    by_alias=by_alias,
-                    exclude_unset=exclude_unset,
-                    exclude_defaults=exclude_defaults,
-                    exclude_none=exclude_none,
-                )
-
-        else:
-
-            def dict(
-                self,
-                *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                by_alias: bool = False,
-                skip_defaults: Optional[bool] = None,
-                exclude_unset: bool = False,
-                exclude_defaults: bool = False,
-                exclude_none: bool = False,
-            ) -> 'DictStrAny':
-                return super().dict(
-                    include=include,
-                    exclude=set(exclude or ()) | self._exclude_fields,
-                    by_alias=by_alias,
-                    skip_defaults=skip_defaults,
-                    exclude_unset=exclude_unset,
-                    exclude_defaults=exclude_defaults,
-                    exclude_none=exclude_none,
-                )
-
-
-class Reference(_BaseModel):
-    path: str
-    original_name: str = ''
-    name: str
-    duplicate_name: Optional[str] = None
-    loaded: bool = True
-    source: Optional[Any] = None
-    children: List[Any] = []
-    _exclude_fields: ClassVar[Set[str]] = {'children'}
-
-    @model_validator(mode='before')
-    def validate_original_name(cls, values: Any) -> Any:
-        """
-        If original_name is empty then, `original_name` is assigned `name`
-        """
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        original_name = values.get('original_name')
-        if original_name:
-            return values
-
-        values['original_name'] = values.get('name', original_name)
-        return values
-
-    if PYDANTIC_V2:
-        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
-        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True,
-            ignored_types=(cached_property,),
-            revalidate_instances='never',
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-            copy_on_model_validation = (
-                False
-                if version.parse(pydantic.VERSION) < version.parse('1.9.2')
-                else 'none'
-            )
-
-    @property
-    def short_name(self) -> str:
-        return self.name.rsplit('.', 1)[-1]
-
-
-SINGULAR_NAME_SUFFIX: str = 'Item'
-
-ID_PATTERN: Pattern[str] = re.compile(r'^#[^/].*')
-
-T = TypeVar('T')
-
-
-@contextmanager
-def context_variable(
-    setter: Callable[[T], None], current_value: T, new_value: T
-) -> Generator[None, None, None]:
-    previous_value: T = current_value
-    setter(new_value)
-    try:
-        yield
-    finally:
-        setter(previous_value)
-
-
-_UNDER_SCORE_1: Pattern[str] = re.compile(r'([^_])([A-Z][a-z]+)')
-_UNDER_SCORE_2: Pattern[str] = re.compile('([a-z0-9])([A-Z])')
-
-
-@lru_cache()
-def camel_to_snake(string: str) -> str:
-    subbed = _UNDER_SCORE_1.sub(r'\1_\2', string)
-    return _UNDER_SCORE_2.sub(r'\1_\2', subbed).lower()
-
-
-class FieldNameResolver:
-    def __init__(
-        self,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        original_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
-    ):
-        self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
-        self.empty_field_name: str = empty_field_name or '_'
-        self.snake_case_field = snake_case_field
-        self.original_delimiter: Optional[str] = original_delimiter
-        self.special_field_name_prefix: Optional[str] = (
-            'field' if special_field_name_prefix is None else special_field_name_prefix
-        )
-        self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
-        self.capitalise_enum_members: bool = capitalise_enum_members
-        self.no_alias = no_alias
-
-    @classmethod
-    def _validate_field_name(cls, field_name: str) -> bool:
-        return True
-
-    def get_valid_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
-    ) -> str:
-        if not name:
-            name = self.empty_field_name
-        if name[0] == '#':
-            name = name[1:] or self.empty_field_name
-
-        if (
-            self.snake_case_field
-            and not ignore_snake_case_field
-            and self.original_delimiter is not None
-        ):
-            name = snake_to_upper_camel(name, delimiter=self.original_delimiter)
-
-        name = re.sub(r'[¹²³⁴⁵⁶⁷⁸⁹]|\W', '_', name)
-        if name[0].isnumeric():
-            name = f'{self.special_field_name_prefix}_{name}'
-
-        # We should avoid having a field begin with an underscore, as it
-        # causes pydantic to consider it as private
-        while name.startswith('_'):
-            if self.remove_special_field_name_prefix:
-                name = name[1:]
-            else:
-                name = f'{self.special_field_name_prefix}{name}'
-                break
-        if (
-            self.capitalise_enum_members
-            or self.snake_case_field
-            and not ignore_snake_case_field
-        ):
-            name = camel_to_snake(name)
-        count = 1
-        if iskeyword(name) or not self._validate_field_name(name):
-            name += '_'
-        if upper_camel:
-            new_name = snake_to_upper_camel(name)
-        elif self.capitalise_enum_members:
-            new_name = name.upper()
-        else:
-            new_name = name
-        while (
-            not (new_name.isidentifier() or not self._validate_field_name(new_name))
-            or iskeyword(new_name)
-            or (excludes and new_name in excludes)
-        ):
-            new_name = f'{name}{count}' if upper_camel else f'{name}_{count}'
-            count += 1
-        return new_name
-
-    def get_valid_field_name_and_alias(
-        self, field_name: str, excludes: Optional[Set[str]] = None
-    ) -> Tuple[str, Optional[str]]:
-        if field_name in self.aliases:
-            return self.aliases[field_name], field_name
-        valid_name = self.get_valid_name(field_name, excludes=excludes)
-        return (
-            valid_name,
-            None if self.no_alias or field_name == valid_name else field_name,
-        )
-
-
-class PydanticFieldNameResolver(FieldNameResolver):
-    @classmethod
-    def _validate_field_name(cls, field_name: str) -> bool:
-        # TODO: Support Pydantic V2
-        return not hasattr(BaseModel, field_name)
-
-
-class EnumFieldNameResolver(FieldNameResolver):
-    def get_valid_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
-    ) -> str:
-        return super().get_valid_name(
-            name='mro_' if name == 'mro' else name,
-            excludes={'mro'} | (excludes or set()),
-            ignore_snake_case_field=ignore_snake_case_field,
-            upper_camel=upper_camel,
-        )
-
-
-class ModelType(Enum):
-    PYDANTIC = auto()
-    ENUM = auto()
-    CLASS = auto()
-
-
-DEFAULT_FIELD_NAME_RESOLVERS: Dict[ModelType, Type[FieldNameResolver]] = {
-    ModelType.ENUM: EnumFieldNameResolver,
-    ModelType.PYDANTIC: PydanticFieldNameResolver,
-    ModelType.CLASS: FieldNameResolver,
-}
-
-
-class ClassName(NamedTuple):
-    name: str
-    duplicate_name: Optional[str]
-
-
-def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
-    if base_path == target_path:
-        return Path('.')
-    if not target_path.is_absolute():
-        return target_path
-    parent_count: int = 0
-    children: List[str] = []
-    for base_part, target_part in zip_longest(base_path.parts, target_path.parts):
-        if base_part == target_part and not parent_count:
-            continue
-        if base_part or not target_part:
-            parent_count += 1
-        if target_part:
-            children.append(target_part)
-    return Path(*['..' for _ in range(parent_count)], *children)
-
-
-class ModelResolver:
-    def __init__(
-        self,
-        exclude_names: Optional[Set[str]] = None,
-        duplicate_name_suffix: Optional[str] = None,
-        base_url: Optional[str] = None,
-        singular_name_suffix: Optional[str] = None,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        base_path: Optional[Path] = None,
-        field_name_resolver_classes: Optional[
-            Dict[ModelType, Type[FieldNameResolver]]
-        ] = None,
-        original_field_name_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        self.references: Dict[str, Reference] = {}
-        self._current_root: Sequence[str] = []
-        self._root_id: Optional[str] = None
-        self._root_id_base_path: Optional[str] = None
-        self.ids: DefaultDict[str, Dict[str, str]] = defaultdict(dict)
-        self.after_load_files: Set[str] = set()
-        self.exclude_names: Set[str] = exclude_names or set()
-        self.duplicate_name_suffix: Optional[str] = duplicate_name_suffix
-        self._base_url: Optional[str] = base_url
-        self.singular_name_suffix: str = (
-            singular_name_suffix
-            if isinstance(singular_name_suffix, str)
-            else SINGULAR_NAME_SUFFIX
-        )
-        merged_field_name_resolver_classes = DEFAULT_FIELD_NAME_RESOLVERS.copy()
-        if field_name_resolver_classes:  # pragma: no cover
-            merged_field_name_resolver_classes.update(field_name_resolver_classes)
-        self.field_name_resolvers: Dict[ModelType, FieldNameResolver] = {
-            k: v(
-                aliases=aliases,
-                snake_case_field=snake_case_field,
-                empty_field_name=empty_field_name,
-                original_delimiter=original_field_name_delimiter,
-                special_field_name_prefix=special_field_name_prefix,
-                remove_special_field_name_prefix=remove_special_field_name_prefix,
-                capitalise_enum_members=capitalise_enum_members
-                if k == ModelType.ENUM
-                else False,
-                no_alias=no_alias,
-            )
-            for k, v in merged_field_name_resolver_classes.items()
-        }
-        self.class_name_generator = (
-            custom_class_name_generator or self.default_class_name_generator
-        )
-        self._base_path: Path = base_path or Path.cwd()
-        self._current_base_path: Optional[Path] = self._base_path
-
-    @property
-    def current_base_path(self) -> Optional[Path]:
-        return self._current_base_path
-
-    def set_current_base_path(self, base_path: Optional[Path]) -> None:
-        self._current_base_path = base_path
-
-    @property
-    def base_url(self) -> Optional[str]:
-        return self._base_url
-
-    def set_base_url(self, base_url: Optional[str]) -> None:
-        self._base_url = base_url
-
-    @contextmanager
-    def current_base_path_context(
-        self, base_path: Optional[Path]
-    ) -> Generator[None, None, None]:
-        if base_path:
-            base_path = (self._base_path / base_path).resolve()
-        with context_variable(
-            self.set_current_base_path, self.current_base_path, base_path
-        ):
-            yield
-
-    @contextmanager
-    def base_url_context(self, base_url: str) -> Generator[None, None, None]:
-        if self._base_url:
-            with context_variable(self.set_base_url, self.base_url, base_url):
-                yield
-        else:
-            yield
-
-    @property
-    def current_root(self) -> Sequence[str]:
-        if len(self._current_root) > 1:
-            return self._current_root
-        return self._current_root
-
-    def set_current_root(self, current_root: Sequence[str]) -> None:
-        self._current_root = current_root
-
-    @contextmanager
-    def current_root_context(
-        self, current_root: Sequence[str]
-    ) -> Generator[None, None, None]:
-        with context_variable(self.set_current_root, self.current_root, current_root):
-            yield
-
-    @property
-    def root_id(self) -> Optional[str]:
-        return self._root_id
-
-    @property
-    def root_id_base_path(self) -> Optional[str]:
-        return self._root_id_base_path
-
-    def set_root_id(self, root_id: Optional[str]) -> None:
-        if root_id and '/' in root_id:
-            self._root_id_base_path = root_id.rsplit('/', 1)[0]
-        else:
-            self._root_id_base_path = None
-
-        self._root_id = root_id
-
-    def add_id(self, id_: str, path: Sequence[str]) -> None:
-        self.ids['/'.join(self.current_root)][id_] = self.resolve_ref(path)
-
-    def resolve_ref(self, path: Union[Sequence[str], str]) -> str:
-        if isinstance(path, str):
-            joined_path = path
-        else:
-            joined_path = self.join_path(path)
-        if joined_path == '#':
-            return f"{'/'.join(self.current_root)}#"
-        if (
-            self.current_base_path
-            and not self.base_url
-            and joined_path[0] != '#'
-            and not is_url(joined_path)
-        ):
-            # resolve local file path
-            file_path, *object_part = joined_path.split('#', 1)
-            resolved_file_path = Path(self.current_base_path, file_path).resolve()
-            joined_path = get_relative_path(
-                self._base_path, resolved_file_path
-            ).as_posix()
-            if object_part:
-                joined_path += f'#{object_part[0]}'
-        if ID_PATTERN.match(joined_path):
-            ref: str = self.ids['/'.join(self.current_root)][joined_path]
-        else:
-            if '#' not in joined_path:
-                joined_path += '#'
-            elif joined_path[0] == '#':
-                joined_path = f'{"/".join(self.current_root)}{joined_path}'
-
-            delimiter = joined_path.index('#')
-            file_path = ''.join(joined_path[:delimiter])
-            ref = f"{''.join(joined_path[:delimiter])}#{''.join(joined_path[delimiter + 1:])}"
-            if self.root_id_base_path and not (
-                is_url(joined_path) or Path(self._base_path, file_path).is_file()
-            ):
-                ref = f'{self.root_id_base_path}/{ref}'
-
-        if self.base_url:
-            from .http import join_url
-
-            joined_url = join_url(self.base_url, ref)
-            if '#' in joined_url:
-                return joined_url
-            return f'{joined_url}#'
-
-        if is_url(ref):
-            file_part, path_part = ref.split('#', 1)
-            if file_part == self.root_id:
-                return f'{"/".join(self.current_root)}#{path_part}'
-            target_url: ParseResult = urlparse(file_part)
-            if not (self.root_id and self.current_base_path):
-                return ref
-            root_id_url: ParseResult = urlparse(self.root_id)
-            if (target_url.scheme, target_url.netloc) == (
-                root_id_url.scheme,
-                root_id_url.netloc,
-            ):  # pragma: no cover
-                target_url_path = Path(target_url.path)
-                relative_target_base = get_relative_path(
-                    Path(root_id_url.path).parent, target_url_path.parent
-                )
-                target_path = (
-                    self.current_base_path / relative_target_base / target_url_path.name
-                )
-                if target_path.exists():
-                    return f'{target_path.resolve().relative_to(self._base_path)}#{path_part}'
-
-        return ref
-
-    def is_after_load(self, ref: str) -> bool:
-        if is_url(ref) or not self.current_base_path:
-            return False
-        file_part, *_ = ref.split('#', 1)
-        absolute_path = Path(self._base_path, file_part).resolve().as_posix()
-        if self.is_external_root_ref(ref):
-            return absolute_path in self.after_load_files
-        elif self.is_external_ref(ref):
-            return absolute_path in self.after_load_files
-        return False  # pragma: no cover
-
-    @staticmethod
-    def is_external_ref(ref: str) -> bool:
-        return '#' in ref and ref[0] != '#'
-
-    @staticmethod
-    def is_external_root_ref(ref: str) -> bool:
-        return ref[-1] == '#'
-
-    @staticmethod
-    def join_path(path: Sequence[str]) -> str:
-        joined_path = '/'.join(p for p in path if p).replace('/#', '#')
-        if '#' not in joined_path:
-            joined_path += '#'
-        return joined_path
-
-    def add_ref(self, ref: str, resolved: bool = False) -> Reference:
-        if not resolved:
-            path = self.resolve_ref(ref)
-        else:
-            path = ref
-        reference = self.references.get(path)
-        if reference:
-            return reference
-        split_ref = ref.rsplit('/', 1)
-        if len(split_ref) == 1:
-            original_name = Path(
-                split_ref[0].rstrip('#')
-                if self.is_external_root_ref(path)
-                else split_ref[0]
-            ).stem
-        else:
-            original_name = (
-                Path(split_ref[1].rstrip('#')).stem
-                if self.is_external_root_ref(path)
-                else split_ref[1]
-            )
-        name = self.get_class_name(original_name, unique=False).name
-        reference = Reference(
-            path=path,
-            original_name=original_name,
-            name=name,
-            loaded=False,
-        )
-
-        self.references[path] = reference
-        return reference
-
-    def add(
-        self,
-        path: Sequence[str],
-        original_name: str,
-        *,
-        class_name: bool = False,
-        singular_name: bool = False,
-        unique: bool = True,
-        singular_name_suffix: Optional[str] = None,
-        loaded: bool = False,
-    ) -> Reference:
-        joined_path = self.join_path(path)
-        reference: Optional[Reference] = self.references.get(joined_path)
-        if reference:
-            if loaded and not reference.loaded:
-                reference.loaded = True
-            if (
-                not original_name
-                or original_name == reference.original_name
-                or original_name == reference.name
-            ):
-                return reference
-        name = original_name
-        duplicate_name: Optional[str] = None
-        if class_name:
-            name, duplicate_name = self.get_class_name(
-                name=name,
-                unique=unique,
-                reserved_name=reference.name if reference else None,
-                singular_name=singular_name,
-                singular_name_suffix=singular_name_suffix,
-            )
-        else:
-            # TODO: create a validate for module name
-            name = self.get_valid_field_name(name, model_type=ModelType.CLASS)
-            if singular_name:  # pragma: no cover
-                name = get_singular_name(
-                    name, singular_name_suffix or self.singular_name_suffix
-                )
-            elif unique:  # pragma: no cover
-                unique_name = self._get_unique_name(name)
-                if unique_name == name:
-                    duplicate_name = name
-                name = unique_name
-        if reference:
-            reference.original_name = original_name
-            reference.name = name
-            reference.loaded = loaded
-            reference.duplicate_name = duplicate_name
-        else:
-            reference = Reference(
-                path=joined_path,
-                original_name=original_name,
-                name=name,
-                loaded=loaded,
-                duplicate_name=duplicate_name,
-            )
-            self.references[joined_path] = reference
-        return reference
-
-    def get(self, path: Union[Sequence[str], str]) -> Optional[Reference]:
-        return self.references.get(self.resolve_ref(path))
-
-    def delete(self, path: Union[Sequence[str], str]) -> None:
-        if self.resolve_ref(path) in self.references:
-            del self.references[self.resolve_ref(path)]
-
-    def default_class_name_generator(self, name: str) -> str:
-        # TODO: create a validate for class name
-        return self.field_name_resolvers[ModelType.CLASS].get_valid_name(
-            name, ignore_snake_case_field=True, upper_camel=True
-        )
-
-    def get_class_name(
-        self,
-        name: str,
-        unique: bool = True,
-        reserved_name: Optional[str] = None,
-        singular_name: bool = False,
-        singular_name_suffix: Optional[str] = None,
-    ) -> ClassName:
-        if '.' in name:
-            split_name = name.split('.')
-            prefix = '.'.join(
-                # TODO: create a validate for class name
-                self.field_name_resolvers[ModelType.CLASS].get_valid_name(
-                    n, ignore_snake_case_field=True
-                )
-                for n in split_name[:-1]
-            )
-            prefix += '.'
-            class_name = split_name[-1]
-        else:
-            prefix = ''
-            class_name = name
-
-        class_name = self.class_name_generator(class_name)
-
-        if singular_name:
-            class_name = get_singular_name(
-                class_name, singular_name_suffix or self.singular_name_suffix
-            )
-        duplicate_name: Optional[str] = None
-        if unique:
-            if reserved_name == class_name:
-                return ClassName(name=class_name, duplicate_name=duplicate_name)
-
-            unique_name = self._get_unique_name(class_name, camel=True)
-            if unique_name != class_name:
-                duplicate_name = class_name
-            class_name = unique_name
-        return ClassName(name=f'{prefix}{class_name}', duplicate_name=duplicate_name)
-
-    def _get_unique_name(self, name: str, camel: bool = False) -> str:
-        unique_name: str = name
-        count: int = 1
-        reference_names = {
-            r.name for r in self.references.values()
-        } | self.exclude_names
-        while unique_name in reference_names:
-            if self.duplicate_name_suffix:
-                name_parts: List[Union[str, int]] = [
-                    name,
-                    self.duplicate_name_suffix,
-                    count - 1,
-                ]
-            else:
-                name_parts = [name, count]
-            delimiter = '' if camel else '_'
-            unique_name = delimiter.join(str(p) for p in name_parts if p)
-            count += 1
-        return unique_name
-
-    @classmethod
-    def validate_name(cls, name: str) -> bool:
-        return name.isidentifier() and not iskeyword(name)
-
-    def get_valid_field_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        model_type: ModelType = ModelType.PYDANTIC,
-    ) -> str:
-        return self.field_name_resolvers[model_type].get_valid_name(name, excludes)
-
-    def get_valid_field_name_and_alias(
-        self,
-        field_name: str,
-        excludes: Optional[Set[str]] = None,
-        model_type: ModelType = ModelType.PYDANTIC,
-    ) -> Tuple[str, Optional[str]]:
-        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(
-            field_name, excludes
-        )
-
-
-@lru_cache()
-def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
-    singular_name = inflect_engine.singular_noun(name)
-    if singular_name is False:
-        singular_name = f'{name}{suffix}'
-    return singular_name
-
-
-@lru_cache()
-def snake_to_upper_camel(word: str, delimiter: str = '_') -> str:
-    prefix = ''
-    if word.startswith(delimiter):
-        prefix = '_'
-        word = word[1:]
-
-    return prefix + ''.join(x[0].upper() + x[1:] for x in word.split(delimiter) if x)
-
-
-def is_url(ref: str) -> bool:
-    return ref.startswith(('https://', 'http://'))
-
-
-inflect_engine = inflect.engine()
diff -pruN 0.26.4-3/datamodel_code_generator/types.py 0.34.0-1/datamodel_code_generator/types.py
--- 0.26.4-3/datamodel_code_generator/types.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,638 +0,0 @@
-import re
-from abc import ABC, abstractmethod
-from enum import Enum, auto
-from functools import lru_cache
-from itertools import chain
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    Dict,
-    FrozenSet,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-
-import pydantic
-from packaging import version
-from pydantic import StrictBool, StrictInt, StrictStr, create_model
-
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ABC_MAPPING,
-    IMPORT_ABC_SEQUENCE,
-    IMPORT_ABC_SET,
-    IMPORT_DICT,
-    IMPORT_FROZEN_SET,
-    IMPORT_LIST,
-    IMPORT_LITERAL,
-    IMPORT_LITERAL_BACKPORT,
-    IMPORT_MAPPING,
-    IMPORT_OPTIONAL,
-    IMPORT_SEQUENCE,
-    IMPORT_SET,
-    IMPORT_UNION,
-    Import,
-)
-from datamodel_code_generator.reference import Reference, _BaseModel
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    Protocol,
-    runtime_checkable,
-)
-
-if PYDANTIC_V2:
-    from pydantic import GetCoreSchemaHandler
-    from pydantic_core import core_schema
-
-T = TypeVar('T')
-
-OPTIONAL = 'Optional'
-OPTIONAL_PREFIX = f'{OPTIONAL}['
-
-UNION = 'Union'
-UNION_PREFIX = f'{UNION}['
-UNION_DELIMITER = ', '
-UNION_PATTERN: Pattern[str] = re.compile(r'\s*,\s*')
-UNION_OPERATOR_DELIMITER = ' | '
-UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r'\s*\|\s*')
-NONE = 'None'
-ANY = 'Any'
-LITERAL = 'Literal'
-SEQUENCE = 'Sequence'
-FROZEN_SET = 'FrozenSet'
-MAPPING = 'Mapping'
-DICT = 'Dict'
-SET = 'Set'
-LIST = 'List'
-STANDARD_DICT = 'dict'
-STANDARD_LIST = 'list'
-STANDARD_SET = 'set'
-STR = 'str'
-
-NOT_REQUIRED = 'NotRequired'
-NOT_REQUIRED_PREFIX = f'{NOT_REQUIRED}['
-
-
-class StrictTypes(Enum):
-    str = 'str'
-    bytes = 'bytes'
-    int = 'int'
-    float = 'float'
-    bool = 'bool'
-
-
-class UnionIntFloat:
-    def __init__(self, value: Union[int, float]) -> None:
-        self.value: Union[int, float] = value
-
-    def __int__(self) -> int:
-        return int(self.value)
-
-    def __float__(self) -> float:
-        return float(self.value)
-
-    def __str__(self) -> str:
-        return str(self.value)
-
-    @classmethod
-    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:
-        yield cls.validate
-
-    @classmethod
-    def __get_pydantic_core_schema__(
-        cls, _source_type: Any, _handler: 'GetCoreSchemaHandler'
-    ) -> 'core_schema.CoreSchema':
-        from_int_schema = core_schema.chain_schema(
-            [
-                core_schema.union_schema(
-                    [core_schema.int_schema(), core_schema.float_schema()]
-                ),
-                core_schema.no_info_plain_validator_function(cls.validate),
-            ]
-        )
-
-        return core_schema.json_or_python_schema(
-            json_schema=from_int_schema,
-            python_schema=core_schema.union_schema(
-                [
-                    # check if it's an instance first before doing any further work
-                    core_schema.is_instance_schema(UnionIntFloat),
-                    from_int_schema,
-                ]
-            ),
-            serialization=core_schema.plain_serializer_function_ser_schema(
-                lambda instance: instance.value
-            ),
-        )
-
-    @classmethod
-    def validate(cls, v: Any) -> 'UnionIntFloat':
-        if isinstance(v, UnionIntFloat):
-            return v
-        elif not isinstance(v, (int, float)):  # pragma: no cover
-            try:
-                int(v)
-                return cls(v)
-            except (TypeError, ValueError):
-                pass
-            try:
-                float(v)
-                return cls(v)
-            except (TypeError, ValueError):
-                pass
-
-            raise TypeError(f'{v} is not int or float')
-        return cls(v)
-
-
-def chain_as_tuple(*iterables: Iterable[T]) -> Tuple[T, ...]:
-    return tuple(chain(*iterables))
-
-
-@lru_cache()
-def _remove_none_from_type(
-    type_: str, split_pattern: Pattern[str], delimiter: str
-) -> List[str]:
-    types: List[str] = []
-    split_type: str = ''
-    inner_count: int = 0
-    for part in re.split(split_pattern, type_):
-        if part == NONE:
-            continue
-        inner_count += part.count('[') - part.count(']')
-        if split_type:
-            split_type += delimiter
-        if inner_count == 0:
-            if split_type:
-                types.append(f'{split_type}{part}')
-            else:
-                types.append(part)
-            split_type = ''
-            continue
-        else:
-            split_type += part
-    return types
-
-
-def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:
-    if use_union_operator:
-        if not re.match(r'^\w+ | ', type_):
-            return type_
-        return UNION_OPERATOR_DELIMITER.join(
-            _remove_none_from_type(
-                type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER
-            )
-        )
-
-    if not type_.startswith(UNION_PREFIX):
-        return type_
-    inner_types = _remove_none_from_type(
-        type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER
-    )
-
-    if len(inner_types) == 1:
-        return inner_types[0]
-    return f'{UNION_PREFIX}{UNION_DELIMITER.join(inner_types)}]'
-
-
-@lru_cache()
-def get_optional_type(type_: str, use_union_operator: bool) -> str:
-    type_ = _remove_none_from_union(type_, use_union_operator)
-
-    if not type_ or type_ == NONE:
-        return NONE
-    if use_union_operator:
-        return f'{type_} | {NONE}'
-    return f'{OPTIONAL_PREFIX}{type_}]'
-
-
-@runtime_checkable
-class Modular(Protocol):
-    @property
-    def module_name(self) -> str:
-        raise NotImplementedError
-
-
-@runtime_checkable
-class Nullable(Protocol):
-    @property
-    def nullable(self) -> bool:
-        raise NotImplementedError
-
-
-class DataType(_BaseModel):
-    if PYDANTIC_V2:
-        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
-        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
-            extra='forbid',
-            revalidate_instances='never',
-        )
-    else:
-        if not TYPE_CHECKING:
-
-            @classmethod
-            def model_rebuild(cls) -> None:
-                cls.update_forward_refs()
-
-        class Config:
-            extra = 'forbid'
-            copy_on_model_validation = (
-                False
-                if version.parse(pydantic.VERSION) < version.parse('1.9.2')
-                else 'none'
-            )
-
-    type: Optional[str] = None
-    reference: Optional[Reference] = None
-    data_types: List['DataType'] = []
-    is_func: bool = False
-    kwargs: Optional[Dict[str, Any]] = None
-    import_: Optional[Import] = None
-    python_version: PythonVersion = PythonVersion.PY_38
-    is_optional: bool = False
-    is_dict: bool = False
-    is_list: bool = False
-    is_set: bool = False
-    is_custom_type: bool = False
-    literals: List[Union[StrictBool, StrictInt, StrictStr]] = []
-    use_standard_collections: bool = False
-    use_generic_container: bool = False
-    use_union_operator: bool = False
-    alias: Optional[str] = None
-    parent: Optional[Any] = None
-    children: List[Any] = []
-    strict: bool = False
-    dict_key: Optional['DataType'] = None
-
-    _exclude_fields: ClassVar[Set[str]] = {'parent', 'children'}
-    _pass_fields: ClassVar[Set[str]] = {'parent', 'children', 'data_types', 'reference'}
-
-    @classmethod
-    def from_import(
-        cls: Type['DataTypeT'],
-        import_: Import,
-        *,
-        is_optional: bool = False,
-        is_dict: bool = False,
-        is_list: bool = False,
-        is_set: bool = False,
-        is_custom_type: bool = False,
-        strict: bool = False,
-        kwargs: Optional[Dict[str, Any]] = None,
-    ) -> 'DataTypeT':
-        return cls(
-            type=import_.import_,
-            import_=import_,
-            is_optional=is_optional,
-            is_dict=is_dict,
-            is_list=is_list,
-            is_set=is_set,
-            is_func=True if kwargs else False,
-            is_custom_type=is_custom_type,
-            strict=strict,
-            kwargs=kwargs,
-        )
-
-    @property
-    def unresolved_types(self) -> FrozenSet[str]:
-        return frozenset(
-            {
-                t.reference.path
-                for data_types in self.data_types
-                for t in data_types.all_data_types
-                if t.reference
-            }
-            | ({self.reference.path} if self.reference else set())
-        )
-
-    def replace_reference(self, reference: Optional[Reference]) -> None:
-        if not self.reference:  # pragma: no cover
-            raise Exception(
-                f"`{self.__class__.__name__}.replace_reference()` can't be called"
-                f' when `reference` field is empty.'
-            )
-        self_id = id(self)
-        self.reference.children = [
-            c for c in self.reference.children if id(c) != self_id
-        ]
-        self.reference = reference
-        if reference:
-            reference.children.append(self)
-
-    def remove_reference(self) -> None:
-        self.replace_reference(None)
-
-    @property
-    def module_name(self) -> Optional[str]:
-        if self.reference and isinstance(self.reference.source, Modular):
-            return self.reference.source.module_name
-        return None  # pragma: no cover
-
-    @property
-    def full_name(self) -> str:
-        module_name = self.module_name
-        if module_name:
-            return f'{module_name}.{self.reference.short_name}'  # type: ignore
-        return self.reference.short_name  # type: ignore
-
-    @property
-    def all_data_types(self) -> Iterator['DataType']:
-        for data_type in self.data_types:
-            yield from data_type.all_data_types
-        yield self
-
-    @property
-    def all_imports(self) -> Iterator[Import]:
-        for data_type in self.data_types:
-            yield from data_type.all_imports
-        yield from self.imports
-
-    @property
-    def imports(self) -> Iterator[Import]:
-        if self.import_:
-            yield self.import_
-        imports: Tuple[Tuple[bool, Import], ...] = (
-            (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
-            (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
-        )
-        if any(self.literals):
-            import_literal = (
-                IMPORT_LITERAL
-                if self.python_version.has_literal_type
-                else IMPORT_LITERAL_BACKPORT
-            )
-            imports = (
-                *imports,
-                (any(self.literals), import_literal),
-            )
-
-        if self.use_generic_container:
-            if self.use_standard_collections:
-                imports = (
-                    *imports,
-                    (self.is_list, IMPORT_ABC_SEQUENCE),
-                    (self.is_set, IMPORT_ABC_SET),
-                    (self.is_dict, IMPORT_ABC_MAPPING),
-                )
-            else:
-                imports = (
-                    *imports,
-                    (self.is_list, IMPORT_SEQUENCE),
-                    (self.is_set, IMPORT_FROZEN_SET),
-                    (self.is_dict, IMPORT_MAPPING),
-                )
-        elif not self.use_standard_collections:
-            imports = (
-                *imports,
-                (self.is_list, IMPORT_LIST),
-                (self.is_set, IMPORT_SET),
-                (self.is_dict, IMPORT_DICT),
-            )
-        for field, import_ in imports:
-            if field and import_ != self.import_:
-                yield import_
-
-        if self.dict_key:
-            yield from self.dict_key.imports
-
-    def __init__(self, **values: Any) -> None:
-        if not TYPE_CHECKING:
-            super().__init__(**values)
-
-        for type_ in self.data_types:
-            if type_.type == ANY and type_.is_optional:
-                if any(t for t in self.data_types if t.type != ANY):  # pragma: no cover
-                    self.is_optional = True
-                    self.data_types = [
-                        t
-                        for t in self.data_types
-                        if not (t.type == ANY and t.is_optional)
-                    ]
-                break  # pragma: no cover
-
-        for data_type in self.data_types:
-            if data_type.reference or data_type.data_types:
-                data_type.parent = self
-
-        if self.reference:
-            self.reference.children.append(self)
-
-    @property
-    def type_hint(self) -> str:
-        type_: Optional[str] = self.alias or self.type
-        if not type_:
-            if self.is_union:
-                data_types: List[str] = []
-                for data_type in self.data_types:
-                    data_type_type = data_type.type_hint
-                    if data_type_type in data_types:  # pragma: no cover
-                        continue
-
-                    if NONE == data_type_type:
-                        self.is_optional = True
-                        continue
-
-                    non_optional_data_type_type = _remove_none_from_union(
-                        data_type_type, self.use_union_operator
-                    )
-
-                    if non_optional_data_type_type != data_type_type:
-                        self.is_optional = True
-
-                    data_types.append(non_optional_data_type_type)
-                if len(data_types) == 1:
-                    type_ = data_types[0]
-                else:
-                    if self.use_union_operator:
-                        type_ = UNION_OPERATOR_DELIMITER.join(data_types)
-                    else:
-                        type_ = f'{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]'
-            elif len(self.data_types) == 1:
-                type_ = self.data_types[0].type_hint
-            elif self.literals:
-                type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
-            else:
-                if self.reference:
-                    type_ = self.reference.short_name
-                else:
-                    # TODO support strict Any
-                    # type_ = 'Any'
-                    type_ = ''
-        if self.reference:
-            source = self.reference.source
-            if isinstance(source, Nullable) and source.nullable:
-                self.is_optional = True
-        if self.reference and self.python_version == PythonVersion.PY_36:
-            type_ = f"'{type_}'"
-        if self.is_list:
-            if self.use_generic_container:
-                list_ = SEQUENCE
-            elif self.use_standard_collections:
-                list_ = STANDARD_LIST
-            else:
-                list_ = LIST
-            type_ = f'{list_}[{type_}]' if type_ else list_
-        elif self.is_set:
-            if self.use_generic_container:
-                set_ = FROZEN_SET
-            elif self.use_standard_collections:
-                set_ = STANDARD_SET
-            else:
-                set_ = SET
-            type_ = f'{set_}[{type_}]' if type_ else set_
-        elif self.is_dict:
-            if self.use_generic_container:
-                dict_ = MAPPING
-            elif self.use_standard_collections:
-                dict_ = STANDARD_DICT
-            else:
-                dict_ = DICT
-            if self.dict_key or type_:
-                key = self.dict_key.type_hint if self.dict_key else STR
-                type_ = f'{dict_}[{key}, {type_ or ANY}]'
-            else:  # pragma: no cover
-                type_ = dict_
-        if self.is_optional and type_ != ANY:
-            return get_optional_type(type_, self.use_union_operator)
-        elif self.is_func:
-            if self.kwargs:
-                kwargs: str = ', '.join(f'{k}={v}' for k, v in self.kwargs.items())
-                return f'{type_}({kwargs})'
-            return f'{type_}()'
-        return type_
-
-    @property
-    def is_union(self) -> bool:
-        return len(self.data_types) > 1
-
-
-DataType.model_rebuild()
-
-DataTypeT = TypeVar('DataTypeT', bound=DataType)
-
-
-class EmptyDataType(DataType):
-    pass
-
-
-class Types(Enum):
-    integer = auto()
-    int32 = auto()
-    int64 = auto()
-    number = auto()
-    float = auto()
-    double = auto()
-    decimal = auto()
-    time = auto()
-    string = auto()
-    byte = auto()
-    binary = auto()
-    date = auto()
-    date_time = auto()
-    timedelta = auto()
-    password = auto()
-    path = auto()
-    email = auto()
-    uuid = auto()
-    uuid1 = auto()
-    uuid2 = auto()
-    uuid3 = auto()
-    uuid4 = auto()
-    uuid5 = auto()
-    uri = auto()
-    hostname = auto()
-    ipv4 = auto()
-    ipv4_network = auto()
-    ipv6 = auto()
-    ipv6_network = auto()
-    boolean = auto()
-    object = auto()
-    null = auto()
-    array = auto()
-    any = auto()
-
-
-class DataTypeManager(ABC):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ) -> None:
-        self.python_version = python_version
-        self.use_standard_collections: bool = use_standard_collections
-        self.use_generic_container_types: bool = use_generic_container_types
-        self.strict_types: Sequence[StrictTypes] = strict_types or ()
-        self.use_non_positive_negative_number_constrained_types: bool = (
-            use_non_positive_negative_number_constrained_types
-        )
-        self.use_union_operator: bool = use_union_operator
-        self.use_pendulum: bool = use_pendulum
-        self.target_datetime_class: DatetimeClassType = target_datetime_class
-
-        if (
-            use_generic_container_types and python_version == PythonVersion.PY_36
-        ):  # pragma: no cover
-            raise Exception(
-                'use_generic_container_types can not be used with target_python_version 3.6.\n'
-                ' The version will be not supported in a future version'
-            )
-
-        if TYPE_CHECKING:
-            self.data_type: Type[DataType]
-        else:
-            self.data_type: Type[DataType] = create_model(
-                'ContextDataType',
-                python_version=(PythonVersion, python_version),
-                use_standard_collections=(bool, use_standard_collections),
-                use_generic_container=(bool, use_generic_container_types),
-                use_union_operator=(bool, use_union_operator),
-                __base__=DataType,
-            )
-
-    @abstractmethod
-    def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
-        raise NotImplementedError
-
-    def get_data_type_from_full_path(
-        self, full_path: str, is_custom_type: bool
-    ) -> DataType:
-        return self.data_type.from_import(
-            Import.from_full_path(full_path), is_custom_type=is_custom_type
-        )
-
-    def get_data_type_from_value(self, value: Any) -> DataType:
-        type_: Optional[Types] = None
-        if isinstance(value, str):
-            type_ = Types.string
-        elif isinstance(value, bool):
-            type_ = Types.boolean
-        elif isinstance(value, int):
-            type_ = Types.integer
-        elif isinstance(value, float):
-            type_ = Types.float
-        elif isinstance(value, dict):
-            return self.data_type.from_import(IMPORT_DICT)
-        elif isinstance(value, list):
-            return self.data_type.from_import(IMPORT_LIST)
-        else:
-            type_ = Types.any
-        return self.get_data_type(type_)
diff -pruN 0.26.4-3/datamodel_code_generator/util.py 0.34.0-1/datamodel_code_generator/util.py
--- 0.26.4-3/datamodel_code_generator/util.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.34.0-1/datamodel_code_generator/util.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,106 +0,0 @@
-from __future__ import annotations
-
-import copy
-from functools import cached_property  # noqa: F401
-from pathlib import Path
-from typing import (  # noqa: F401
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Protocol,
-    TypeVar,
-    runtime_checkable,
-)
-
-import pydantic
-from packaging import version
-from pydantic import BaseModel as _BaseModel
-
-PYDANTIC_VERSION = version.parse(
-    pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION)
-)
-
-PYDANTIC_V2: bool = PYDANTIC_VERSION >= version.parse('2.0b3')
-
-if TYPE_CHECKING:
-    from typing import Literal
-
-    from yaml import SafeLoader
-
-    def load_toml(path: Path) -> Dict[str, Any]: ...
-
-else:
-    try:
-        from yaml import CSafeLoader as SafeLoader
-    except ImportError:  # pragma: no cover
-        from yaml import SafeLoader
-
-    try:
-        import tomllib
-
-        def load_toml(path: Path) -> Dict[str, Any]:
-            with path.open('rb') as f:
-                return tomllib.load(f)
-
-    except ImportError:
-        import toml
-
-        def load_toml(path: Path) -> Dict[str, Any]:
-            return toml.load(path)
-
-
-SafeLoaderTemp = copy.deepcopy(SafeLoader)
-SafeLoaderTemp.yaml_constructors = copy.deepcopy(SafeLoader.yaml_constructors)
-SafeLoaderTemp.add_constructor(
-    'tag:yaml.org,2002:timestamp',
-    SafeLoaderTemp.yaml_constructors['tag:yaml.org,2002:str'],
-)
-SafeLoader = SafeLoaderTemp
-
-Model = TypeVar('Model', bound=_BaseModel)
-
-
-def model_validator(
-    mode: Literal['before', 'after'] = 'after',
-) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]:
-    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
-        if PYDANTIC_V2:
-            from pydantic import model_validator as model_validator_v2
-
-            return model_validator_v2(mode=mode)(method)  # type: ignore
-        else:
-            from pydantic import root_validator
-
-            return root_validator(method, pre=mode == 'before')  # type: ignore
-
-    return inner
-
-
-def field_validator(
-    field_name: str,
-    *fields: str,
-    mode: Literal['before', 'after'] = 'after',
-) -> Callable[[Any], Callable[[Model, Any], Any]]:
-    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
-        if PYDANTIC_V2:
-            from pydantic import field_validator as field_validator_v2
-
-            return field_validator_v2(field_name, *fields, mode=mode)(method)  # type: ignore
-        else:
-            from pydantic import validator
-
-            return validator(field_name, *fields, pre=mode == 'before')(method)  # type: ignore
-
-    return inner
-
-
-if PYDANTIC_V2:
-    from pydantic import ConfigDict as ConfigDict
-else:
-    ConfigDict = dict  # type: ignore
-
-
-class BaseModel(_BaseModel):
-    if PYDANTIC_V2:
-        model_config = ConfigDict(strict=False)
diff -pruN 0.26.4-3/datamodel_code_generator/version.py 0.34.0-1/datamodel_code_generator/version.py
--- 0.26.4-3/datamodel_code_generator/version.py	2024-12-15 17:26:17.175006400 +0000
+++ 0.34.0-1/datamodel_code_generator/version.py	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-version: str = '0.26.4'
diff -pruN 0.26.4-3/debian/changelog 0.34.0-1/debian/changelog
--- 0.26.4-3/debian/changelog	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/changelog	2025-09-30 07:41:25.000000000 +0000
@@ -1,3 +1,25 @@
+python-datamodel-code-generator (0.34.0-1) unstable; urgency=medium
+
+  * Use watch v5.
+  * New upstream version 0.34.0
+  * Drop Rules-Requires-Root: no.
+  * Use Salsa CI for licenserecon.
+
+ -- Simon Josefsson <simon@josefsson.org>  Tue, 30 Sep 2025 09:41:25 +0200
+
+python-datamodel-code-generator (0.33.0-1) unstable; urgency=medium
+
+  * Fix watch URL.
+  * New upstream version 0.33.0
+  * Drop upstreamed isort-6.patch.
+  * Adapt PYTHONPATH for source path move.
+  * Mark datamodel-codegen Multi-Arch: foreign.
+  * Standards-Version: 4.7.2.
+  * Add build deps.
+  * Disable python-prance tests.
+
+ -- Simon Josefsson <simon@josefsson.org>  Tue, 19 Aug 2025 00:03:03 +0200
+
 python-datamodel-code-generator (0.26.4-3) unstable; urgency=medium
 
   * Team upload.
diff -pruN 0.26.4-3/debian/control 0.34.0-1/debian/control
--- 0.26.4-3/debian/control	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/control	2025-09-30 07:41:25.000000000 +0000
@@ -3,7 +3,7 @@ Maintainer: Debian Python Team <team+pyt
 Uploaders:
  Simon Josefsson <simon@josefsson.org>,
 Priority: optional
-Standards-Version: 4.7.0
+Standards-Version: 4.7.2
 Section: python
 Homepage: https://github.com/koxudaxi/datamodel-code-generator
 Build-Depends:
@@ -14,16 +14,23 @@ Build-Depends:
  pybuild-plugin-pyproject,
  python3-all,
  python3-argcomplete,
+ python3-freezegun <!nocheck>,
+ python3-genson <!nocheck>,
+ python3-graphql-core <!nocheck>,
+ python3-hatchling,
+ python3-httpx <!nocheck>,
  python3-isort,
  python3-jinja2,
  python3-poetry-core,
  python3-pydantic,
+ python3-pytest <!nocheck>,
+ python3-pytest-benchmark <!nocheck>,
+ python3-pytest-mock <!nocheck>,
  python3-setuptools,
  python3-yaml,
 Vcs-Git: https://salsa.debian.org/python-team/packages/python-datamodel-code-generator.git
 Vcs-Browser: https://salsa.debian.org/python-team/packages/python-datamodel-code-generator
 Testsuite: autopkgtest-pkg-pybuild
-Rules-Requires-Root: no
 
 Package: python3-datamodel-code-generator
 Architecture: all
@@ -41,6 +48,7 @@ Description: pydantic code generator fro
 Package: datamodel-codegen
 Section: devel
 Architecture: all
+Multi-Arch: foreign
 Depends:
  ${misc:Depends},
  ${python3:Depends},
diff -pruN 0.26.4-3/debian/patches/isort-6.patch 0.34.0-1/debian/patches/isort-6.patch
--- 0.26.4-3/debian/patches/isort-6.patch	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/patches/isort-6.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,27 +0,0 @@
-From: Colin Watson <cjwatson@debian.org>
-Date: Thu, 30 Jan 2025 12:21:14 +0000
-Subject: Support isort 6
-
-The breaking changes in isort 6 are just to remove support for Python
-3.8 (see https://github.com/PyCQA/isort/releases), so there's no need
-for this project to treat it differently from isort 5.
-
-Forwarded: https://github.com/koxudaxi/datamodel-code-generator/pull/2289
-Last-Update: 2025-01-30
----
- pyproject.toml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/pyproject.toml b/pyproject.toml
-index 921fee8..a2c2b28 100644
---- a/pyproject.toml
-+++ b/pyproject.toml
-@@ -52,7 +52,7 @@ argcomplete = ">=1.10,<4.0"
- jinja2 = ">=2.10.1,<4.0"
- inflect = ">=4.1.0,<6.0"
- black = ">=19.10b0"
--isort = ">=4.3.21,<6.0"
-+isort = ">=4.3.21,<7.0"
- genson = ">=1.2.1,<2.0"
- packaging = "*"
- prance = { version = ">=0.18.2", optional = true }
diff -pruN 0.26.4-3/debian/patches/series 0.34.0-1/debian/patches/series
--- 0.26.4-3/debian/patches/series	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/patches/series	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-isort-6.patch
diff -pruN 0.26.4-3/debian/rules 0.34.0-1/debian/rules
--- 0.26.4-3/debian/rules	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/rules	2025-09-30 07:41:25.000000000 +0000
@@ -2,6 +2,9 @@
 
 include /usr/share/dpkg/pkg-info.mk # DEB_VERSION
 
+# python-prance
+export PYBUILD_TEST_ARGS= -k 'not test_validation and not test_validation_failed and not test_openapi_special_yaml_keywords and not test_openapi_parser_parse_remote_ref'
+
 %:
 	dh $@ --buildsystem=pybuild
 
@@ -11,7 +14,7 @@ M = $(CURDIR)/debian/tmp/usr/share/man/m
 execute_after_dh_auto_install:
 ifeq (,$(filter nodoc,$(DEB_BUILD_PROFILES)))
 	mkdir -pv $(M)
-	env PYTHONPATH=$(CURDIR) \
+	env PYTHONPATH=$(CURDIR)/src \
 		help2man --no-info --version-string="$(DEB_VERSION)" \
 		--help-option="--no-color --help" \
 		-Idebian/datamodel-codegen.h2m \
diff -pruN 0.26.4-3/debian/salsa-ci.yml 0.34.0-1/debian/salsa-ci.yml
--- 0.26.4-3/debian/salsa-ci.yml	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/salsa-ci.yml	2025-09-30 07:41:25.000000000 +0000
@@ -1,11 +1,11 @@
 include:
 - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/recipes/debian.yml
-- https://salsa.debian.org/debian/licenserecon/raw/main/debian/licenserecon.yml
 
 variables:
+  SALSA_CI_AUTOPKGTEST_ALLOWED_EXIT_STATUS: '0'
   SALSA_CI_DISABLE_APTLY: 0
+  SALSA_CI_ENABLE_LICENSERECON: 1
   SALSA_CI_ENABLE_WRAP_AND_SORT: '1'
-  SALSA_CI_WRAP_AND_SORT_ARGS: '-asbkt'
-  SALSA_CI_AUTOPKGTEST_ALLOWED_EXIT_STATUS: '0'
   SALSA_CI_LINTIAN_FAIL_WARNING: '1'
   SALSA_CI_LINTIAN_SUPPRESS_TAGS: 'orig-tarball-missing-upstream-signature'
+  SALSA_CI_WRAP_AND_SORT_ARGS: '-asbkt'
diff -pruN 0.26.4-3/debian/watch 0.34.0-1/debian/watch
--- 0.26.4-3/debian/watch	2025-01-30 12:29:15.000000000 +0000
+++ 0.34.0-1/debian/watch	2025-09-30 07:41:25.000000000 +0000
@@ -1,3 +1,4 @@
-version=4
-opts=uversionmangle=s/(rc|a|b|c)/~$1/ \
-https://pypi.debian.net/datamodel-code-generator/datamodel-code-generator-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))
+Version: 5
+Source: https://pypi.debian.net/datamodel-code-generator/
+Matching-Pattern: datamodel_code_generator-@ANY_VERSION@@ARCHIVE_EXT@
+Uversionmangle: s/(rc|a|b|c)/~$1/
diff -pruN 0.26.4-3/pyproject.toml 0.34.0-1/pyproject.toml
--- 0.26.4-3/pyproject.toml	2024-12-15 17:26:17.175006400 +0000
+++ 0.34.0-1/pyproject.toml	2025-09-30 07:37:47.000000000 +0000
@@ -1,164 +1,225 @@
-[tool.poetry]
+[build-system]
+build-backend = "hatchling.build"
+requires = [
+  "hatch-vcs>=0.4",
+  "hatchling>=1.25",
+]
+
+[project]
 name = "datamodel-code-generator"
-version = "0.26.4"
 description = "Datamodel Code Generator"
-authors = ["Koudai Aono <koxudaxi@gmail.com>"]
-readme = "README.md"
+readme.content-type = "text/markdown"
+readme.file = "README.md"
 license = "MIT"
-homepage = "https://github.com/koxudaxi/datamodel-code-generator"
-repository = "https://github.com/koxudaxi/datamodel-code-generator"
-
-
+authors = [ { name = "Koudai Aono", email = "koxudaxi@gmail.com" } ]
+requires-python = ">=3.9"
 classifiers = [
-        "Development Status :: 4 - Beta",
-        "Natural Language :: English",
-        "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
-        "Programming Language :: Python :: Implementation :: CPython"]
-
-[build-system]
-requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
-build-backend = "poetry.core.masonry.api"
+  "Development Status :: 4 - Beta",
+  "License :: OSI Approved :: MIT License",
+  "Natural Language :: English",
+  "Programming Language :: Python :: 3 :: Only",
+  "Programming Language :: Python :: 3.9",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: 3.13",
+  "Programming Language :: Python :: Implementation :: CPython",
+]
+dynamic = [
+  "version",
+]
+dependencies = [
+  "argcomplete>=2.10.1,<4",
+  "black>=19.10b0",
+  "genson>=1.2.1,<2",
+  "inflect>=4.1,<8",
+  "isort>=4.3.21,<7",
+  "jinja2>=2.10.1,<4",
+  "packaging",
+  "pydantic>=1.5",
+  "pyyaml>=6.0.1",
+  "tomli>=2.2.1,<3; python_version<='3.11'",
+]
+optional-dependencies.all = [
+  "datamodel-code-generator[debug]",
+  "datamodel-code-generator[graphql]",
+  "datamodel-code-generator[http]",
+  "datamodel-code-generator[ruff]",
+  "datamodel-code-generator[validation]",
+]
+optional-dependencies.debug = [
+  "pysnooper>=0.4.1,<2",
+]
+optional-dependencies.graphql = [
+  "graphql-core>=3.2.3",
+]
+optional-dependencies.http = [
+  "httpx>=0.24.1",
+]
+optional-dependencies.ruff = [
+  "ruff>=0.9.10",
+]
+optional-dependencies.validation = [
+  "openapi-spec-validator>=0.2.8,<0.7",
+  "prance>=0.18.2",
+]
+urls.Homepage = "https://github.com/koxudaxi/datamodel-code-generator"
+urls.Source = "https://github.com/koxudaxi/datamodel-code-generator"
+scripts.datamodel-codegen = "datamodel_code_generator.__main__:main"
+
+[dependency-groups]
+dev = [
+  { include-group = "coverage" },
+  { include-group = "docs" },
+  { include-group = "fix" },
+  { include-group = "pkg-meta" },
+  { include-group = "test" },
+  { include-group = "type" },
+]
+test = [
+  "freezegun",
+  "pytest>=6.1",
+  "pytest>=8.3.4",
+  "pytest-benchmark",
+  "pytest-codspeed>=2.2",
+  "pytest-cov>=2.12.1",
+  "pytest-cov>=5",
+  "pytest-mock>=3.14",
+  "pytest-xdist>=3.3.1",
+  "setuptools; python_version<'3.10'", # PyCharm debugger needs it
+  { include-group = "coverage" },
+]
+type = [
+  "pyright>=1.1.393",
+  "types-jinja2",
+  "types-pyyaml",
+  "types-setuptools>=67.6.0.5,<70",
+  "types-toml",
+  { include-group = "test" },
+]
+docs = [
+  "mkdocs>=1.6",
+  "mkdocs-material>=9.5.31",
+]
+black22 = [ "black==22.1" ]
+black23 = [ "black==23.12" ]
+black24 = [ "black==24.1" ]
+fix = [ "pre-commit-uv>=4.1.4" ]
+pkg-meta = [ "check-wheel-contents>=0.6.1", "twine>=6.1", "uv>=0.5.22" ]
+coverage = [
+  "covdefaults>=2.3",
+  "coverage[toml]>=7.6.1",
+  "diff-cover>=7.7",
+]
 
-[tool.poetry-dynamic-versioning]
-enable = false
-vcs = "git"
-# language=RegExp
-pattern = '^(?P<base>\d+\.\d+\.\d+)(-?((?P<stage>[a-zA-Z]+)\.?(?P<revision>\d+)?))?$'
-
-[tool.poetry-dynamic-versioning.substitution]
-files = ["*/version.py"]
-patterns = ["(^version: str = ')[^']*(')"]
-
-
-[tool.poetry.scripts]
-datamodel-codegen = "datamodel_code_generator.__main__:main"
-
-[tool.poetry.dependencies]
-python = "^3.8"
-pydantic =  [
-    {extras = ["email"], version = ">=1.5.1,<3.0,!=2.4.0", python = "<3.10"},
-    {extras = ["email"], version = ">=1.9.0,<3.0,!=2.4.0", python = "~3.10"},
-    {extras = ["email"], version = ">=1.10.0,<3.0,!=2.4.0", python = "^3.11"},
-    {extras = ["email"], version = ">=1.10.0,!=2.0.0,!=2.0.1,<3.0,!=2.4.0", python = "^3.12"}
-]
-argcomplete = ">=1.10,<4.0"
-jinja2 = ">=2.10.1,<4.0"
-inflect = ">=4.1.0,<6.0"
-black = ">=19.10b0"
-isort = ">=4.3.21,<6.0"
-genson = ">=1.2.1,<2.0"
-packaging = "*"
-prance = { version = ">=0.18.2", optional = true }
-openapi-spec-validator = { version = ">=0.2.8,<0.7.0", optional = true }
-toml = { version = ">=0.10.0,<1.0.0", python = "<3.11" }
-PySnooper = { version = ">=0.4.1,<2.0.0", optional = true }
-httpx = { version = "*", optional = true }
-pyyaml = ">=6.0.1"
-graphql-core = {version = "^3.2.3", optional = true}
-
-[tool.poetry.group.dev.dependencies]
-pytest = ">6.1"
-pytest-benchmark = "*"
-pytest-cov = ">=2.12.1"
-pytest-mock = "*"
-mypy = ">=1.4.1,<1.5.0"
-black = ">=23.3,<25.0"
-freezegun = "*"
-types-Jinja2 = "*"
-types-PyYAML = "*"
-types-toml = "*"
-types-setuptools = ">=67.6.0.5,<70.0.0.0"
-pydantic = "*"
-httpx = ">=0.24.1"
-PySnooper = "*"
-ruff = ">=0.0.290,<0.7.5"
-ruff-lsp = ">=0.0.39,<0.0.60"
-pre-commit = "*"
-pytest-xdist = "^3.3.1"
-prance = "*"
-openapi-spec-validator = "*"
-pytest-codspeed = "^2.2.0"
-
-
-[tool.poetry.extras]
-http = ["httpx"]
-graphql = ["graphql-core"]
-debug = ["PySnooper"]
-validation = ["prance", "openapi-spec-validator"]
+[tool.hatch]
+build.dev-mode-dirs = [ "src" ]
+build.targets.sdist.include = [
+  "/src",
+  "/tests",
+]
+version.source = "vcs"
 
 [tool.ruff]
-line-length = 88
-extend-select = ['Q', 'RUF100', 'C4', 'UP', 'I']
-flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
-target-version = 'py37'
-ignore = ['E501', 'UP006', 'UP007', 'Q000', 'Q003' ]
-extend-exclude = ['tests/data']
-
-[tool.ruff.format]
-quote-style = "single"
-indent-style = "space"
-skip-magic-trailing-comma = false
-line-ending = "auto"
-
-[tool.mypy]
-plugins = "pydantic.mypy"
-
-ignore_missing_imports = true
-follow_imports = "silent"
-strict_optional = true
-warn_redundant_casts = true
-warn_unused_ignores = true
-disallow_any_generics = true
-check_untyped_defs = true
-no_implicit_reexport = true
-disallow_untyped_defs = true
-
-[tool.pydantic-mypy]
-init_forbid_extra = true
-init_typed = true
-warn_required_dynamic_aliases = false
-warn_untyped_fields = true
+line-length = 120
+extend-exclude = [ "tests/data" ]
+format.preview = true
+format.docstring-code-format = true
+lint.select = [
+  "ALL",
+]
+lint.ignore = [
+  "ANN401", # Any as type annotation is allowed
+  "C901",   # complex structure
+  "COM812", # Conflict with formatter
+  "CPY",    # No copyright statements
+  "D",      # limited documentation
+  "DOC",    # limited documentation
+  "FIX002", # line contains to do
+  "ISC001", # Conflict with formatter
+  "S101",   # can use assert
+  "TD002",  # missing to do author
+  "TD003",  # missing to do link
+  "TD004",  # missing colon in to do
+]
+lint.per-file-ignores."tests/**/*.py" = [
+  "FBT",     # don't care about booleans as positional arguments in tests
+  "INP001",  # no implicit namespace
+  "PLC2701", # private import is fine
+  "PLR0913", # as many arguments as want
+  "PLR0915", # can have longer test methods
+  "PLR0917", # as many arguments as want
+  "PLR2004", # Magic value used in comparison, consider replacing with a constant variable
+  "S",       # no safety concerns
+  "SLF001",  # can test private methods
+]
+lint.isort = { known-first-party = [
+  "datamodel_code_generator",
+  "tests",
+], required-imports = [
+  "from __future__ import annotations",
+] }
+
+lint.preview = true
+
+[tool.codespell]
+skip = '.git,*.lock,tests'
 
 [tool.pytest.ini_options]
-filterwarnings = "ignore::DeprecationWarning:distutils"
+filterwarnings = [
+  "error",
+  "ignore:^.*The `parse_obj` method is deprecated; use `model_validate` instead.*",
+  "ignore:^.*The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.*",
+  "ignore:^.*The `dict` method is deprecated; use `model_dump` instead.*",
+  "ignore:^.*The `copy` method is deprecated; use `model_copy` instead.*",
+  "ignore:^.*`--validation` option is deprecated.*",
+  "ignore:^.*Field name `name` is duplicated on Pet.*",
+  "ignore:^.*format of 'unknown-type' not understood for 'string' - using default.*",
+  "ignore:^.*unclosed file.*",
+  "ignore:^.*black doesn't support `experimental-string-processing` option for wrapping string literal in .*",
+  "ignore:^.*jsonschema.exceptions.RefResolutionError is deprecated as of version 4.18.0. If you wish to catch potential reference resolution errors, directly catch referencing.exceptions.Unresolvable..*",
+  "ignore:^.*`experimental string processing` has been included in `preview` and deprecated. Use `preview` instead..*",
+]
 norecursedirs = "tests/data/*"
+verbosity_assertions = 2
 
-[tool.coverage.run]
-source = ["datamodel_code_generator"]
-branch = true
-omit = ["scripts/*"]
-
-
-[tool.coverage.report]
-ignore_errors = true
-exclude_lines = [
-    "if self.debug:",
-    "pragma: no cover",
-    "raise NotImplementedError",
-    "if __name__ == .__main__.:",
-    "if TYPE_CHECKING:",
-    "if not TYPE_CHECKING:"]
-
-omit = ["tests/*"]
+[tool.coverage]
+html.skip_covered = false
+html.show_contexts = false
+paths.source = [
+  "src",
+  ".tox*/*/lib/python*/site-packages",
+  ".tox*\\*\\Lib\\site-packages",
+  "*/src",
+  "*\\src",
+]
+paths.other = [
+  ".",
+  "*/datamodel-code-generator",
+  "*\\datamodel-code-generator",
+]
+run.dynamic_context = "none"
+run.omit = [ "tests/data/*" ]
+report.fail_under = 88
+run.parallel = true
+run.plugins = [
+  "covdefaults",
+]
+covdefaults.subtract_omit = "*/__main__.py"
 
+[tool.pyright]
+reportPrivateImportUsage = false
 
 [tool.pydantic-pycharm-plugin]
 ignore-init-method-arguments = true
+parsable-types.str = [ "int", "float" ]
 
-[tool.pydantic-pycharm-plugin.parsable-types]
-# str field may parse int and float
-str = ["int", "float"]
-
-[tool.codespell]
-# Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = '.git,*.lock,tests'
-# check-hidden = true
-# ignore-regex = ''
-# ignore-words-list = ''
+[tool.uv]
+conflicts = [
+  [
+    { group = "black24" },
+    { group = "black22" },
+    { group = "black23" },
+    { group = "dev" },
+  ],
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/__init__.py 0.34.0-1/src/datamodel_code_generator/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,601 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import sys
+from collections.abc import Iterator, Mapping, Sequence
+from datetime import datetime, timezone
+from enum import Enum
+from pathlib import Path
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Final,
+    TextIO,
+    TypeVar,
+    cast,
+)
+from urllib.parse import ParseResult
+
+import yaml
+import yaml.parser
+
+import datamodel_code_generator.pydantic_patch  # noqa: F401
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.util import SafeLoader
+
+MIN_VERSION: Final[int] = 9
+MAX_VERSION: Final[int] = 13
+
+T = TypeVar("T")
+
+try:
+    import pysnooper
+
+    pysnooper.tracer.DISABLED = True
+except ImportError:  # pragma: no cover
+    pysnooper = None
+
+DEFAULT_BASE_CLASS: str = "pydantic.BaseModel"
+
+
+def load_yaml(stream: str | TextIO) -> Any:
+    return yaml.load(stream, Loader=SafeLoader)  # noqa: S506
+
+
+def load_yaml_from_path(path: Path, encoding: str) -> Any:
+    with path.open(encoding=encoding) as f:
+        return load_yaml(f)
+
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.model.pydantic_v2 import UnionMode
+    from datamodel_code_generator.parser.base import Parser
+    from datamodel_code_generator.types import StrictTypes
+
+    def get_version() -> str: ...
+
+else:
+
+    def get_version() -> str:
+        package = "datamodel-code-generator"
+
+        from importlib.metadata import version  # noqa: PLC0415
+
+        return version(package)
+
+
+def enable_debug_message() -> None:  # pragma: no cover
+    if not pysnooper:
+        msg = "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
+        raise Exception(msg)  # noqa: TRY002
+
+    pysnooper.tracer.DISABLED = False
+
+
+DEFAULT_MAX_VARIABLE_LENGTH: int = 100
+
+
+def snooper_to_methods() -> Callable[..., Any]:
+    def inner(cls: type[T]) -> type[T]:
+        if not pysnooper:
+            return cls
+        import inspect  # noqa: PLC0415
+
+        methods = inspect.getmembers(cls, predicate=inspect.isfunction)
+        for name, method in methods:
+            snooper_method = pysnooper.snoop(max_variable_length=DEFAULT_MAX_VARIABLE_LENGTH)(method)
+            setattr(cls, name, snooper_method)
+        return cls
+
+    return inner
+
+
+@contextlib.contextmanager
+def chdir(path: Path | None) -> Iterator[None]:
+    """Changes working directory and returns to previous on exit."""
+
+    if path is None:
+        yield
+    else:
+        prev_cwd = Path.cwd()
+        try:
+            os.chdir(path if path.is_dir() else path.parent)
+            yield
+        finally:
+            os.chdir(prev_cwd)
+
+
+def is_openapi(data: dict) -> bool:
+    return "openapi" in data
+
+
+JSON_SCHEMA_URLS: tuple[str, ...] = (
+    "http://json-schema.org/",
+    "https://json-schema.org/",
+)
+
+
+def is_schema(data: dict) -> bool:
+    schema = data.get("$schema")
+    if isinstance(schema, str) and any(schema.startswith(u) for u in JSON_SCHEMA_URLS):  # pragma: no cover
+        return True
+    if isinstance(data.get("type"), str):
+        return True
+    if any(
+        isinstance(data.get(o), list)
+        for o in (
+            "allOf",
+            "anyOf",
+            "oneOf",
+        )
+    ):
+        return True
+    return isinstance(data.get("properties"), dict)
+
+
+class InputFileType(Enum):
+    Auto = "auto"
+    OpenAPI = "openapi"
+    JsonSchema = "jsonschema"
+    Json = "json"
+    Yaml = "yaml"
+    Dict = "dict"
+    CSV = "csv"
+    GraphQL = "graphql"
+
+
+RAW_DATA_TYPES: list[InputFileType] = [
+    InputFileType.Json,
+    InputFileType.Yaml,
+    InputFileType.Dict,
+    InputFileType.CSV,
+    InputFileType.GraphQL,
+]
+
+
+class DataModelType(Enum):
+    PydanticBaseModel = "pydantic.BaseModel"
+    PydanticV2BaseModel = "pydantic_v2.BaseModel"
+    DataclassesDataclass = "dataclasses.dataclass"
+    TypingTypedDict = "typing.TypedDict"
+    MsgspecStruct = "msgspec.Struct"
+
+
+class OpenAPIScope(Enum):
+    Schemas = "schemas"
+    Paths = "paths"
+    Tags = "tags"
+    Parameters = "parameters"
+
+
+class GraphQLScope(Enum):
+    Schema = "schema"
+
+
+class Error(Exception):
+    def __init__(self, message: str) -> None:
+        self.message: str = message
+
+    def __str__(self) -> str:
+        return self.message
+
+
+class InvalidClassNameError(Error):
+    def __init__(self, class_name: str) -> None:
+        self.class_name = class_name
+        message = f"title={class_name!r} is invalid class name."
+        super().__init__(message=message)
+
+
+def get_first_file(path: Path) -> Path:  # pragma: no cover
+    if path.is_file():
+        return path
+    if path.is_dir():
+        for child in path.rglob("*"):
+            if child.is_file():
+                return child
+    msg = f"No file found in: {path}"
+    raise FileNotFoundError(msg)
+
+
+def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
+    input_: Path | str | ParseResult | Mapping[str, Any],
+    *,
+    input_filename: str | None = None,
+    input_file_type: InputFileType = InputFileType.Auto,
+    output: Path | None = None,
+    output_model_type: DataModelType = DataModelType.PydanticBaseModel,
+    target_python_version: PythonVersion = PythonVersionMin,
+    base_class: str = "",
+    additional_imports: list[str] | None = None,
+    custom_template_dir: Path | None = None,
+    extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+    validation: bool = False,
+    field_constraints: bool = False,
+    snake_case_field: bool = False,
+    strip_default_none: bool = False,
+    aliases: Mapping[str, str] | None = None,
+    disable_timestamp: bool = False,
+    enable_version_header: bool = False,
+    allow_population_by_field_name: bool = False,
+    allow_extra_fields: bool = False,
+    extra_fields: str | None = None,
+    apply_default_values_for_required_fields: bool = False,
+    force_optional_for_required_fields: bool = False,
+    class_name: str | None = None,
+    use_standard_collections: bool = False,
+    use_schema_description: bool = False,
+    use_field_description: bool = False,
+    use_default_kwarg: bool = False,
+    reuse_model: bool = False,
+    encoding: str = "utf-8",
+    enum_field_as_literal: LiteralType | None = None,
+    use_one_literal_as_default: bool = False,
+    set_default_enum_member: bool = False,
+    use_subclass_enum: bool = False,
+    strict_nullable: bool = False,
+    use_generic_container_types: bool = False,
+    enable_faux_immutability: bool = False,
+    disable_appending_item_suffix: bool = False,
+    strict_types: Sequence[StrictTypes] | None = None,
+    empty_enum_field_name: str | None = None,
+    custom_class_name_generator: Callable[[str], str] | None = None,
+    field_extra_keys: set[str] | None = None,
+    field_include_all_keys: bool = False,
+    field_extra_keys_without_x_prefix: set[str] | None = None,
+    openapi_scopes: list[OpenAPIScope] | None = None,
+    include_path_parameters: bool = False,
+    graphql_scopes: list[GraphQLScope] | None = None,  # noqa: ARG001
+    wrap_string_literal: bool | None = None,
+    use_title_as_name: bool = False,
+    use_operation_id_as_name: bool = False,
+    use_unique_items_as_set: bool = False,
+    http_headers: Sequence[tuple[str, str]] | None = None,
+    http_ignore_tls: bool = False,
+    use_annotated: bool = False,
+    use_non_positive_negative_number_constrained_types: bool = False,
+    original_field_name_delimiter: str | None = None,
+    use_double_quotes: bool = False,
+    use_union_operator: bool = False,
+    collapse_root_models: bool = False,
+    special_field_name_prefix: str | None = None,
+    remove_special_field_name_prefix: bool = False,
+    capitalise_enum_members: bool = False,
+    keep_model_order: bool = False,
+    custom_file_header: str | None = None,
+    custom_file_header_path: Path | None = None,
+    custom_formatters: list[str] | None = None,
+    custom_formatters_kwargs: dict[str, Any] | None = None,
+    use_pendulum: bool = False,
+    http_query_parameters: Sequence[tuple[str, str]] | None = None,
+    treat_dot_as_module: bool = False,
+    use_exact_imports: bool = False,
+    union_mode: UnionMode | None = None,
+    output_datetime_class: DatetimeClassType | None = None,
+    keyword_only: bool = False,
+    frozen_dataclasses: bool = False,
+    no_alias: bool = False,
+    formatters: list[Formatter] = DEFAULT_FORMATTERS,
+    parent_scoped_naming: bool = False,
+    disable_future_imports: bool = False,
+) -> None:
+    remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
+    if isinstance(input_, str):
+        input_text: str | None = input_
+    elif isinstance(input_, ParseResult):
+        from datamodel_code_generator.http import get_body  # noqa: PLC0415
+
+        input_text = remote_text_cache.get_or_put(
+            input_.geturl(),
+            default_factory=lambda url: get_body(url, http_headers, http_ignore_tls, http_query_parameters),
+        )
+    else:
+        input_text = None
+
+    if isinstance(input_, Path) and not input_.is_absolute():
+        input_ = input_.expanduser().resolve()
+    if input_file_type == InputFileType.Auto:
+        try:
+            input_text_ = (
+                get_first_file(input_).read_text(encoding=encoding) if isinstance(input_, Path) else input_text
+            )
+        except FileNotFoundError as exc:
+            msg = "File not found"
+            raise Error(msg) from exc
+
+        try:
+            assert isinstance(input_text_, str)
+            input_file_type = infer_input_type(input_text_)
+        except Exception as exc:
+            msg = "Invalid file format"
+            raise Error(msg) from exc
+        else:
+            print(  # noqa: T201
+                inferred_message.format(input_file_type.value),
+                file=sys.stderr,
+            )
+
+    kwargs: dict[str, Any] = {}
+    if input_file_type == InputFileType.OpenAPI:  # noqa: PLR1702
+        from datamodel_code_generator.parser.openapi import OpenAPIParser  # noqa: PLC0415
+
+        parser_class: type[Parser] = OpenAPIParser
+        kwargs["openapi_scopes"] = openapi_scopes
+        kwargs["include_path_parameters"] = include_path_parameters
+    elif input_file_type == InputFileType.GraphQL:
+        from datamodel_code_generator.parser.graphql import GraphQLParser  # noqa: PLC0415
+
+        parser_class: type[Parser] = GraphQLParser
+    else:
+        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser  # noqa: PLC0415
+
+        parser_class = JsonSchemaParser
+
+        if input_file_type in RAW_DATA_TYPES:
+            import json  # noqa: PLC0415
+
+            try:
+                if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
+                    msg = f"Input must be a file for {input_file_type}"
+                    raise Error(msg)  # noqa: TRY301
+                obj: dict[Any, Any]
+                if input_file_type == InputFileType.CSV:
+                    import csv  # noqa: PLC0415
+
+                    def get_header_and_first_line(csv_file: IO[str]) -> dict[str, Any]:
+                        csv_reader = csv.DictReader(csv_file)
+                        assert csv_reader.fieldnames is not None
+                        return dict(zip(csv_reader.fieldnames, next(csv_reader)))
+
+                    if isinstance(input_, Path):
+                        with input_.open(encoding=encoding) as f:
+                            obj = get_header_and_first_line(f)
+                    else:
+                        import io  # noqa: PLC0415
+
+                        obj = get_header_and_first_line(io.StringIO(input_text))
+                elif input_file_type == InputFileType.Yaml:
+                    if isinstance(input_, Path):
+                        obj = load_yaml(input_.read_text(encoding=encoding))
+                    else:
+                        assert input_text is not None
+                        obj = load_yaml(input_text)
+                elif input_file_type == InputFileType.Json:
+                    if isinstance(input_, Path):
+                        obj = json.loads(input_.read_text(encoding=encoding))
+                    else:
+                        assert input_text is not None
+                        obj = json.loads(input_text)
+                elif input_file_type == InputFileType.Dict:
+                    import ast  # noqa: PLC0415
+
+                    # Input can be a dict object stored in a python file
+                    obj = (
+                        ast.literal_eval(input_.read_text(encoding=encoding))
+                        if isinstance(input_, Path)
+                        else cast("dict[Any, Any]", input_)
+                    )
+                else:  # pragma: no cover
+                    msg = f"Unsupported input file type: {input_file_type}"
+                    raise Error(msg)  # noqa: TRY301
+            except Exception as exc:
+                msg = "Invalid file format"
+                raise Error(msg) from exc
+
+            from genson import SchemaBuilder  # noqa: PLC0415
+
+            builder = SchemaBuilder()
+            builder.add_object(obj)
+            input_text = json.dumps(builder.to_schema())
+
+    if isinstance(input_, ParseResult) and input_file_type not in RAW_DATA_TYPES:
+        input_text = None
+
+    if union_mode is not None:
+        if output_model_type == DataModelType.PydanticV2BaseModel:
+            default_field_extras = {"union_mode": union_mode}
+        else:  # pragma: no cover
+            msg = "union_mode is only supported for pydantic_v2.BaseModel"
+            raise Error(msg)
+    else:
+        default_field_extras = None
+
+    from datamodel_code_generator.model import get_data_model_types  # noqa: PLC0415
+
+    data_model_types = get_data_model_types(output_model_type, target_python_version)
+    source = input_text or input_
+    assert not isinstance(source, Mapping)
+    parser = parser_class(
+        source=source,
+        data_model_type=data_model_types.data_model,
+        data_model_root_type=data_model_types.root_model,
+        data_model_field_type=data_model_types.field_model,
+        data_type_manager_type=data_model_types.data_type_manager,
+        base_class=base_class,
+        additional_imports=additional_imports,
+        custom_template_dir=custom_template_dir,
+        extra_template_data=extra_template_data,
+        target_python_version=target_python_version,
+        dump_resolve_reference_action=data_model_types.dump_resolve_reference_action,
+        validation=validation,
+        field_constraints=field_constraints,
+        snake_case_field=snake_case_field,
+        strip_default_none=strip_default_none,
+        aliases=aliases,
+        allow_population_by_field_name=allow_population_by_field_name,
+        allow_extra_fields=allow_extra_fields,
+        extra_fields=extra_fields,
+        apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+        force_optional_for_required_fields=force_optional_for_required_fields,
+        class_name=class_name,
+        use_standard_collections=use_standard_collections,
+        base_path=input_.parent if isinstance(input_, Path) and input_.is_file() else None,
+        use_schema_description=use_schema_description,
+        use_field_description=use_field_description,
+        use_default_kwarg=use_default_kwarg,
+        reuse_model=reuse_model,
+        enum_field_as_literal=LiteralType.All
+        if output_model_type == DataModelType.TypingTypedDict
+        else enum_field_as_literal,
+        use_one_literal_as_default=use_one_literal_as_default,
+        set_default_enum_member=True
+        if output_model_type == DataModelType.DataclassesDataclass
+        else set_default_enum_member,
+        use_subclass_enum=use_subclass_enum,
+        strict_nullable=strict_nullable,
+        use_generic_container_types=use_generic_container_types,
+        enable_faux_immutability=enable_faux_immutability,
+        remote_text_cache=remote_text_cache,
+        disable_appending_item_suffix=disable_appending_item_suffix,
+        strict_types=strict_types,
+        empty_enum_field_name=empty_enum_field_name,
+        custom_class_name_generator=custom_class_name_generator,
+        field_extra_keys=field_extra_keys,
+        field_include_all_keys=field_include_all_keys,
+        field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+        wrap_string_literal=wrap_string_literal,
+        use_title_as_name=use_title_as_name,
+        use_operation_id_as_name=use_operation_id_as_name,
+        use_unique_items_as_set=use_unique_items_as_set,
+        http_headers=http_headers,
+        http_ignore_tls=http_ignore_tls,
+        use_annotated=use_annotated,
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+        original_field_name_delimiter=original_field_name_delimiter,
+        use_double_quotes=use_double_quotes,
+        use_union_operator=use_union_operator,
+        collapse_root_models=collapse_root_models,
+        special_field_name_prefix=special_field_name_prefix,
+        remove_special_field_name_prefix=remove_special_field_name_prefix,
+        capitalise_enum_members=capitalise_enum_members,
+        keep_model_order=keep_model_order,
+        known_third_party=data_model_types.known_third_party,
+        custom_formatters=custom_formatters,
+        custom_formatters_kwargs=custom_formatters_kwargs,
+        use_pendulum=use_pendulum,
+        http_query_parameters=http_query_parameters,
+        treat_dot_as_module=treat_dot_as_module,
+        use_exact_imports=use_exact_imports,
+        default_field_extras=default_field_extras,
+        target_datetime_class=output_datetime_class,
+        keyword_only=keyword_only,
+        frozen_dataclasses=frozen_dataclasses,
+        no_alias=no_alias,
+        formatters=formatters,
+        encoding=encoding,
+        parent_scoped_naming=parent_scoped_naming,
+        **kwargs,
+    )
+
+    with chdir(output):
+        results = parser.parse(disable_future_imports=disable_future_imports)
+    if not input_filename:  # pragma: no cover
+        if isinstance(input_, str):
+            input_filename = "<stdin>"
+        elif isinstance(input_, ParseResult):
+            input_filename = input_.geturl()
+        elif input_file_type == InputFileType.Dict:
+            # input_ might be a dict object provided directly, and missing a name field
+            input_filename = getattr(input_, "name", "<dict>")
+        else:
+            assert isinstance(input_, Path)
+            input_filename = input_.name
+    if not results:
+        msg = "Models not found in the input data"
+        raise Error(msg)
+    if isinstance(results, str):
+        modules = {output: (results, input_filename)}
+    else:
+        if output is None:
+            msg = "Modular references require an output directory"
+            raise Error(msg)
+        if output.suffix:
+            msg = "Modular references require an output directory, not a file"
+            raise Error(msg)
+        modules = {
+            output.joinpath(*name): (
+                result.body,
+                str(result.source.as_posix() if result.source else input_filename),
+            )
+            for name, result in sorted(results.items())
+        }
+
+    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
+
+    if custom_file_header is None and custom_file_header_path:
+        custom_file_header = custom_file_header_path.read_text(encoding=encoding)
+
+    header = """\
+# generated by datamodel-codegen:
+#   filename:  {}"""
+    if not disable_timestamp:
+        header += f"\n#   timestamp: {timestamp}"
+    if enable_version_header:
+        header += f"\n#   version:   {get_version()}"
+
+    file: IO[Any] | None
+    for path, (body, filename) in modules.items():
+        if path is None:
+            file = None
+        else:
+            if not path.parent.exists():
+                path.parent.mkdir(parents=True)
+            file = path.open("wt", encoding=encoding)
+
+        safe_filename = filename.replace("\n", " ").replace("\r", " ") if filename else ""
+        print(custom_file_header or header.format(safe_filename), file=file)
+        if body:
+            print(file=file)
+            print(body.rstrip(), file=file)
+
+        if file is not None:
+            file.close()
+
+
+def infer_input_type(text: str) -> InputFileType:
+    try:
+        data = load_yaml(text)
+    except yaml.parser.ParserError:
+        return InputFileType.CSV
+    if isinstance(data, dict):
+        if is_openapi(data):
+            return InputFileType.OpenAPI
+        if is_schema(data):
+            return InputFileType.JsonSchema
+        return InputFileType.Json
+    msg = (
+        "Can't infer input file type from the input data. "
+        "Please specify the input file type explicitly with --input-file-type option."
+    )
+    raise Error(msg)
+
+
+inferred_message = (
+    "The input file type was determined to be: {}\nThis can be specified explicitly with the "
+    "`--input-file-type` option."
+)
+
+__all__ = [
+    "MAX_VERSION",
+    "MIN_VERSION",
+    "DatetimeClassType",
+    "DefaultPutDict",
+    "Error",
+    "InputFileType",
+    "InvalidClassNameError",
+    "LiteralType",
+    "PythonVersion",
+    "generate",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/__main__.py 0.34.0-1/src/datamodel_code_generator/__main__.py
--- 0.26.4-3/src/datamodel_code_generator/__main__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/__main__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,599 @@
+"""
+Main function.
+"""
+
+from __future__ import annotations
+
+import json
+import signal
+import sys
+import warnings
+from collections import defaultdict
+from collections.abc import Sequence  # noqa: TC003  # pydantic needs it
+from enum import IntEnum
+from io import TextIOBase
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast
+from urllib.parse import ParseResult, urlparse
+
+import argcomplete
+import black
+from pydantic import BaseModel
+
+if TYPE_CHECKING:
+    from argparse import Namespace
+
+    from typing_extensions import Self
+
+from datamodel_code_generator import (
+    DataModelType,
+    Error,
+    InputFileType,
+    InvalidClassNameError,
+    OpenAPIScope,
+    enable_debug_message,
+    generate,
+)
+from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+    is_supported_in_black,
+)
+from datamodel_code_generator.model.pydantic_v2 import UnionMode  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.parser import LiteralType  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.reference import is_url
+from datamodel_code_generator.types import StrictTypes  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.util import (
+    PYDANTIC_V2,
+    ConfigDict,
+    Model,
+    field_validator,
+    load_toml,
+    model_validator,
+)
+
+
+class Exit(IntEnum):
+    """Exit reasons."""
+
+    OK = 0
+    ERROR = 1
+    KeyboardInterrupt = 2
+
+
+def sig_int_handler(_: int, __: Any) -> None:  # pragma: no cover
+    sys.exit(Exit.OK)
+
+
+signal.signal(signal.SIGINT, sig_int_handler)
+
+
+class Config(BaseModel):
+    if PYDANTIC_V2:
+        model_config = ConfigDict(arbitrary_types_allowed=True)  # pyright: ignore[reportAssignmentType]
+
+        def get(self, item: str) -> Any:
+            return getattr(self, item)
+
+        def __getitem__(self, item: str) -> Any:
+            return self.get(item)
+
+        if TYPE_CHECKING:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]: ...
+
+        else:
+
+            @classmethod
+            def parse_obj(cls: type[Model], obj: Any) -> Model:
+                return cls.model_validate(obj)
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                return cls.model_fields
+
+    else:
+
+        class Config:
+            # Pydantic 1.5.1 doesn't support validate_assignment correctly
+            arbitrary_types_allowed = (TextIOBase,)
+
+        if not TYPE_CHECKING:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                return cls.__fields__
+
+    @field_validator("aliases", "extra_template_data", "custom_formatters_kwargs", mode="before")
+    def validate_file(cls, value: Any) -> TextIOBase | None:  # noqa: N805
+        if value is None or isinstance(value, TextIOBase):
+            return value
+        return cast("TextIOBase", Path(value).expanduser().resolve().open("rt"))
+
+    @field_validator(
+        "input",
+        "output",
+        "custom_template_dir",
+        "custom_file_header_path",
+        mode="before",
+    )
+    def validate_path(cls, value: Any) -> Path | None:  # noqa: N805
+        if value is None or isinstance(value, Path):
+            return value  # pragma: no cover
+        return Path(value).expanduser().resolve()
+
+    @field_validator("url", mode="before")
+    def validate_url(cls, value: Any) -> ParseResult | None:  # noqa: N805
+        if isinstance(value, str) and is_url(value):  # pragma: no cover
+            return urlparse(value)
+        if value is None:  # pragma: no cover
+            return None
+        msg = f"This protocol is not supported; only http/https URLs are allowed. --input={value}"
+        raise Error(msg)  # pragma: no cover
+
+    # Pydantic 1.5.1 doesn't support each_item=True correctly
+    @field_validator("http_headers", mode="before")
+    def validate_http_headers(cls, value: Any) -> list[tuple[str, str]] | None:  # noqa: N805
+        def validate_each_item(each_item: Any) -> tuple[str, str]:
+            if isinstance(each_item, str):  # pragma: no cover
+                try:
+                    field_name, field_value = each_item.split(":", maxsplit=1)
+                    return field_name, field_value.lstrip()
+                except ValueError as exc:
+                    msg = f"Invalid http header: {each_item!r}"
+                    raise Error(msg) from exc
+            return each_item  # pragma: no cover
+
+        if isinstance(value, list):
+            return [validate_each_item(each_item) for each_item in value]
+        return value  # pragma: no cover
+
+    @field_validator("http_query_parameters", mode="before")
+    def validate_http_query_parameters(cls, value: Any) -> list[tuple[str, str]] | None:  # noqa: N805
+        def validate_each_item(each_item: Any) -> tuple[str, str]:
+            if isinstance(each_item, str):  # pragma: no cover
+                try:
+                    field_name, field_value = each_item.split("=", maxsplit=1)
+                    return field_name, field_value.lstrip()
+                except ValueError as exc:
+                    msg = f"Invalid http query parameter: {each_item!r}"
+                    raise Error(msg) from exc
+            return each_item  # pragma: no cover
+
+        if isinstance(value, list):
+            return [validate_each_item(each_item) for each_item in value]
+        return value  # pragma: no cover
+
+    @model_validator(mode="before")
+    def validate_additional_imports(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+        additional_imports = values.get("additional_imports")
+        if additional_imports is not None:
+            values["additional_imports"] = additional_imports.split(",")
+        return values
+
+    @model_validator(mode="before")
+    def validate_custom_formatters(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+        custom_formatters = values.get("custom_formatters")
+        if custom_formatters is not None:
+            values["custom_formatters"] = custom_formatters.split(",")
+        return values
+
+    __validate_output_datetime_class_err: ClassVar[str] = (
+        '`--output-datetime-class` only allows "datetime" for '
+        f"`--output-model-type` {DataModelType.DataclassesDataclass.value}"
+    )
+
+    __validate_original_field_name_delimiter_err: ClassVar[str] = (
+        "`--original-field-name-delimiter` can not be used without `--snake-case-field`."
+    )
+
+    __validate_custom_file_header_err: ClassVar[str] = (
+        "`--custom-file-header-path` can not be used with `--custom-file-header`."
+    )
+    __validate_keyword_only_err: ClassVar[str] = (
+        f"`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher."
+    )
+
+    if PYDANTIC_V2:
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_output_datetime_class(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            datetime_class_type: DatetimeClassType | None = self.output_datetime_class
+            if (
+                datetime_class_type
+                and datetime_class_type is not DatetimeClassType.Datetime
+                and self.output_model_type == DataModelType.DataclassesDataclass
+            ):
+                raise Error(self.__validate_output_datetime_class_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_original_field_name_delimiter(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            if self.original_field_name_delimiter is not None and not self.snake_case_field:
+                raise Error(self.__validate_original_field_name_delimiter_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_custom_file_header(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            if self.custom_file_header and self.custom_file_header_path:
+                raise Error(self.__validate_custom_file_header_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_keyword_only(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            output_model_type: DataModelType = self.output_model_type
+            python_target: PythonVersion = self.target_python_version
+            if (
+                self.keyword_only
+                and output_model_type == DataModelType.DataclassesDataclass
+                and not python_target.has_kw_only_dataclass
+            ):
+                raise Error(self.__validate_keyword_only_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_root(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            if self.use_annotated:
+                self.field_constraints = True
+            return self
+
+    else:
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_output_datetime_class(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            datetime_class_type: DatetimeClassType | None = values.get("output_datetime_class")
+            if (
+                datetime_class_type
+                and datetime_class_type is not DatetimeClassType.Datetime
+                and values.get("output_model_type") == DataModelType.DataclassesDataclass
+            ):
+                raise Error(cls.__validate_output_datetime_class_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_original_field_name_delimiter(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            if values.get("original_field_name_delimiter") is not None and not values.get("snake_case_field"):
+                raise Error(cls.__validate_original_field_name_delimiter_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_custom_file_header(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            if values.get("custom_file_header") and values.get("custom_file_header_path"):
+                raise Error(cls.__validate_custom_file_header_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_keyword_only(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            output_model_type: DataModelType = cast("DataModelType", values.get("output_model_type"))
+            python_target: PythonVersion = cast("PythonVersion", values.get("target_python_version"))
+            if (
+                values.get("keyword_only")
+                and output_model_type == DataModelType.DataclassesDataclass
+                and not python_target.has_kw_only_dataclass
+            ):
+                raise Error(cls.__validate_keyword_only_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_root(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            if values.get("use_annotated"):
+                values["field_constraints"] = True
+            return values
+
+    input: Optional[Union[Path, str]] = None  # noqa: UP007, UP045
+    input_file_type: InputFileType = InputFileType.Auto
+    output_model_type: DataModelType = DataModelType.PydanticBaseModel
+    output: Optional[Path] = None  # noqa: UP045
+    debug: bool = False
+    disable_warnings: bool = False
+    target_python_version: PythonVersion = PythonVersionMin
+    base_class: str = ""
+    additional_imports: Optional[list[str]] = None  # noqa: UP045
+    custom_template_dir: Optional[Path] = None  # noqa: UP045
+    extra_template_data: Optional[TextIOBase] = None  # noqa: UP045
+    validation: bool = False
+    field_constraints: bool = False
+    snake_case_field: bool = False
+    strip_default_none: bool = False
+    aliases: Optional[TextIOBase] = None  # noqa: UP045
+    disable_timestamp: bool = False
+    enable_version_header: bool = False
+    allow_population_by_field_name: bool = False
+    allow_extra_fields: bool = False
+    extra_fields: Optional[str] = None  # noqa: UP045
+    use_default: bool = False
+    force_optional: bool = False
+    class_name: Optional[str] = None  # noqa: UP045
+    use_standard_collections: bool = False
+    use_schema_description: bool = False
+    use_field_description: bool = False
+    use_default_kwarg: bool = False
+    reuse_model: bool = False
+    encoding: str = DEFAULT_ENCODING
+    enum_field_as_literal: Optional[LiteralType] = None  # noqa: UP045
+    use_one_literal_as_default: bool = False
+    set_default_enum_member: bool = False
+    use_subclass_enum: bool = False
+    strict_nullable: bool = False
+    use_generic_container_types: bool = False
+    use_union_operator: bool = False
+    enable_faux_immutability: bool = False
+    url: Optional[ParseResult] = None  # noqa: UP045
+    disable_appending_item_suffix: bool = False
+    strict_types: list[StrictTypes] = []
+    empty_enum_field_name: Optional[str] = None  # noqa: UP045
+    field_extra_keys: Optional[set[str]] = None  # noqa: UP045
+    field_include_all_keys: bool = False
+    field_extra_keys_without_x_prefix: Optional[set[str]] = None  # noqa: UP045
+    openapi_scopes: Optional[list[OpenAPIScope]] = [OpenAPIScope.Schemas]  # noqa: UP045
+    include_path_parameters: bool = False
+    wrap_string_literal: Optional[bool] = None  # noqa: UP045
+    use_title_as_name: bool = False
+    use_operation_id_as_name: bool = False
+    use_unique_items_as_set: bool = False
+    http_headers: Optional[Sequence[tuple[str, str]]] = None  # noqa: UP045
+    http_ignore_tls: bool = False
+    use_annotated: bool = False
+    use_non_positive_negative_number_constrained_types: bool = False
+    original_field_name_delimiter: Optional[str] = None  # noqa: UP045
+    use_double_quotes: bool = False
+    collapse_root_models: bool = False
+    special_field_name_prefix: Optional[str] = None  # noqa: UP045
+    remove_special_field_name_prefix: bool = False
+    capitalise_enum_members: bool = False
+    keep_model_order: bool = False
+    custom_file_header: Optional[str] = None  # noqa: UP045
+    custom_file_header_path: Optional[Path] = None  # noqa: UP045
+    custom_formatters: Optional[list[str]] = None  # noqa: UP045
+    custom_formatters_kwargs: Optional[TextIOBase] = None  # noqa: UP045
+    use_pendulum: bool = False
+    http_query_parameters: Optional[Sequence[tuple[str, str]]] = None  # noqa: UP045
+    treat_dot_as_module: bool = False
+    use_exact_imports: bool = False
+    union_mode: Optional[UnionMode] = None  # noqa: UP045
+    output_datetime_class: Optional[DatetimeClassType] = None  # noqa: UP045
+    keyword_only: bool = False
+    frozen_dataclasses: bool = False
+    no_alias: bool = False
+    formatters: list[Formatter] = DEFAULT_FORMATTERS
+    parent_scoped_naming: bool = False
+    disable_future_imports: bool = False
+
+    def merge_args(self, args: Namespace) -> None:
+        set_args = {f: getattr(args, f) for f in self.get_fields() if getattr(args, f) is not None}
+
+        if set_args.get("output_model_type") == DataModelType.MsgspecStruct.value:
+            set_args["use_annotated"] = True
+
+        if set_args.get("use_annotated"):
+            set_args["field_constraints"] = True
+
+        parsed_args = Config.parse_obj(set_args)
+        for field_name in set_args:
+            setattr(self, field_name, getattr(parsed_args, field_name))
+
+
+def _get_pyproject_toml_config(source: Path) -> dict[str, Any]:
+    """Find and return the [tool.datamodel-codegen] section of the closest
+    pyproject.toml if it exists.
+    """
+
+    current_path = source
+    while current_path != current_path.parent:
+        if (current_path / "pyproject.toml").is_file():
+            pyproject_toml = load_toml(current_path / "pyproject.toml")
+            if "datamodel-codegen" in pyproject_toml.get("tool", {}):
+                pyproject_config = pyproject_toml["tool"]["datamodel-codegen"]
+                # Convert options from kebab-case to snake_case
+                pyproject_config = {k.replace("-", "_"): v for k, v in pyproject_config.items()}
+                # Replace US-american spelling if present (ignore if british spelling is present)
+                if "capitalize_enum_members" in pyproject_config and "capitalise_enum_members" not in pyproject_config:
+                    pyproject_config["capitalise_enum_members"] = pyproject_config.pop("capitalize_enum_members")
+                return pyproject_config
+
+        if (current_path / ".git").exists():
+            # Stop early if we see a git repository root.
+            return {}
+
+        current_path = current_path.parent
+    return {}
+
+
+def main(args: Sequence[str] | None = None) -> Exit:  # noqa: PLR0911, PLR0912, PLR0915
+    """Main function."""
+
+    # add cli completion support
+    argcomplete.autocomplete(arg_parser)
+
+    if args is None:  # pragma: no cover
+        args = sys.argv[1:]
+
+    arg_parser.parse_args(args, namespace=namespace)
+
+    if namespace.version:
+        from datamodel_code_generator import get_version  # noqa: PLC0415
+
+        print(get_version())  # noqa: T201
+        sys.exit(0)
+
+    pyproject_config = _get_pyproject_toml_config(Path.cwd())
+
+    try:
+        config = Config.parse_obj(pyproject_config)
+        config.merge_args(namespace)
+    except Error as e:
+        print(e.message, file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+
+    if not config.input and not config.url and sys.stdin.isatty():
+        print(  # noqa: T201
+            "Not Found Input: require `stdin` or arguments `--input` or `--url`",
+            file=sys.stderr,
+        )
+        arg_parser.print_help()
+        return Exit.ERROR
+
+    if not is_supported_in_black(config.target_python_version):  # pragma: no cover
+        print(  # noqa: T201
+            f"Installed black doesn't support Python version {config.target_python_version.value}.\n"
+            f"You have to install a newer black.\n"
+            f"Installed black version: {black.__version__}",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    if config.debug:  # pragma: no cover
+        enable_debug_message()
+
+    if config.disable_warnings:
+        warnings.simplefilter("ignore")
+    extra_template_data: defaultdict[str, dict[str, Any]] | None
+    if config.extra_template_data is None:
+        extra_template_data = None
+    else:
+        with config.extra_template_data as data:
+            try:
+                extra_template_data = json.load(data, object_hook=lambda d: defaultdict(dict, **d))
+            except json.JSONDecodeError as e:
+                print(f"Unable to load extra template data: {e}", file=sys.stderr)  # noqa: T201
+                return Exit.ERROR
+
+    if config.aliases is None:
+        aliases = None
+    else:
+        with config.aliases as data:
+            try:
+                aliases = json.load(data)
+            except json.JSONDecodeError as e:
+                print(f"Unable to load alias mapping: {e}", file=sys.stderr)  # noqa: T201
+                return Exit.ERROR
+        if not isinstance(aliases, dict) or not all(
+            isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
+        ):
+            print(  # noqa: T201
+                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
+                file=sys.stderr,
+            )
+            return Exit.ERROR
+
+    if config.custom_formatters_kwargs is None:
+        custom_formatters_kwargs = None
+    else:
+        with config.custom_formatters_kwargs as data:
+            try:
+                custom_formatters_kwargs = json.load(data)
+            except json.JSONDecodeError as e:  # pragma: no cover
+                print(  # noqa: T201
+                    f"Unable to load custom_formatters_kwargs mapping: {e}",
+                    file=sys.stderr,
+                )
+                return Exit.ERROR
+        if not isinstance(custom_formatters_kwargs, dict) or not all(
+            isinstance(k, str) and isinstance(v, str) for k, v in custom_formatters_kwargs.items()
+        ):  # pragma: no cover
+            print(  # noqa: T201
+                'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
+                file=sys.stderr,
+            )
+            return Exit.ERROR
+
+    try:
+        generate(
+            input_=config.url or config.input or sys.stdin.read(),
+            input_file_type=config.input_file_type,
+            output=config.output,
+            output_model_type=config.output_model_type,
+            target_python_version=config.target_python_version,
+            base_class=config.base_class,
+            additional_imports=config.additional_imports,
+            custom_template_dir=config.custom_template_dir,
+            validation=config.validation,
+            field_constraints=config.field_constraints,
+            snake_case_field=config.snake_case_field,
+            strip_default_none=config.strip_default_none,
+            extra_template_data=extra_template_data,
+            aliases=aliases,
+            disable_timestamp=config.disable_timestamp,
+            enable_version_header=config.enable_version_header,
+            allow_population_by_field_name=config.allow_population_by_field_name,
+            allow_extra_fields=config.allow_extra_fields,
+            extra_fields=config.extra_fields,
+            apply_default_values_for_required_fields=config.use_default,
+            force_optional_for_required_fields=config.force_optional,
+            class_name=config.class_name,
+            use_standard_collections=config.use_standard_collections,
+            use_schema_description=config.use_schema_description,
+            use_field_description=config.use_field_description,
+            use_default_kwarg=config.use_default_kwarg,
+            reuse_model=config.reuse_model,
+            encoding=config.encoding,
+            enum_field_as_literal=config.enum_field_as_literal,
+            use_one_literal_as_default=config.use_one_literal_as_default,
+            set_default_enum_member=config.set_default_enum_member,
+            use_subclass_enum=config.use_subclass_enum,
+            strict_nullable=config.strict_nullable,
+            use_generic_container_types=config.use_generic_container_types,
+            enable_faux_immutability=config.enable_faux_immutability,
+            disable_appending_item_suffix=config.disable_appending_item_suffix,
+            strict_types=config.strict_types,
+            empty_enum_field_name=config.empty_enum_field_name,
+            field_extra_keys=config.field_extra_keys,
+            field_include_all_keys=config.field_include_all_keys,
+            field_extra_keys_without_x_prefix=config.field_extra_keys_without_x_prefix,
+            openapi_scopes=config.openapi_scopes,
+            include_path_parameters=config.include_path_parameters,
+            wrap_string_literal=config.wrap_string_literal,
+            use_title_as_name=config.use_title_as_name,
+            use_operation_id_as_name=config.use_operation_id_as_name,
+            use_unique_items_as_set=config.use_unique_items_as_set,
+            http_headers=config.http_headers,
+            http_ignore_tls=config.http_ignore_tls,
+            use_annotated=config.use_annotated,
+            use_non_positive_negative_number_constrained_types=config.use_non_positive_negative_number_constrained_types,
+            original_field_name_delimiter=config.original_field_name_delimiter,
+            use_double_quotes=config.use_double_quotes,
+            collapse_root_models=config.collapse_root_models,
+            use_union_operator=config.use_union_operator,
+            special_field_name_prefix=config.special_field_name_prefix,
+            remove_special_field_name_prefix=config.remove_special_field_name_prefix,
+            capitalise_enum_members=config.capitalise_enum_members,
+            keep_model_order=config.keep_model_order,
+            custom_file_header=config.custom_file_header,
+            custom_file_header_path=config.custom_file_header_path,
+            custom_formatters=config.custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=config.use_pendulum,
+            http_query_parameters=config.http_query_parameters,
+            treat_dot_as_module=config.treat_dot_as_module,
+            use_exact_imports=config.use_exact_imports,
+            union_mode=config.union_mode,
+            output_datetime_class=config.output_datetime_class,
+            keyword_only=config.keyword_only,
+            frozen_dataclasses=config.frozen_dataclasses,
+            no_alias=config.no_alias,
+            formatters=config.formatters,
+            parent_scoped_naming=config.parent_scoped_naming,
+            disable_future_imports=config.disable_future_imports,
+        )
+    except InvalidClassNameError as e:
+        print(f"{e} You have to set `--class-name` option", file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+    except Error as e:
+        print(str(e), file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+    except Exception:  # noqa: BLE001
+        import traceback  # noqa: PLC0415
+
+        print(traceback.format_exc(), file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+    else:
+        return Exit.OK
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff -pruN 0.26.4-3/src/datamodel_code_generator/arguments.py 0.34.0-1/src/datamodel_code_generator/arguments.py
--- 0.26.4-3/src/datamodel_code_generator/arguments.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/arguments.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,565 @@
+from __future__ import annotations
+
+import locale
+from argparse import ArgumentParser, FileType, HelpFormatter, Namespace
+from operator import attrgetter
+from typing import TYPE_CHECKING
+
+from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope
+from datamodel_code_generator.format import DatetimeClassType, Formatter, PythonVersion
+from datamodel_code_generator.model.pydantic_v2 import UnionMode
+from datamodel_code_generator.parser import LiteralType
+from datamodel_code_generator.types import StrictTypes
+
+if TYPE_CHECKING:
+    from argparse import Action
+    from collections.abc import Iterable
+
+DEFAULT_ENCODING = locale.getpreferredencoding()
+
+namespace = Namespace(no_color=False)
+
+
+class SortingHelpFormatter(HelpFormatter):
+    """argparse help formatter that sorts options alphabetically and colorizes section headings."""
+
+    def _bold_cyan(self, text: str) -> str:  # noqa: PLR6301
+        # ANSI escape sequence: bold cyan on, then reset attributes.
+        return f"\x1b[36;1m{text}\x1b[0m"
+
+    def add_arguments(self, actions: Iterable[Action]) -> None:
+        # Sort actions by their option strings so --help output is alphabetical within a group.
+        actions = sorted(actions, key=attrgetter("option_strings"))
+        super().add_arguments(actions)
+
+    def start_section(self, heading: str | None) -> None:
+        # Colorize the heading unless colors are disabled via the module-level
+        # `namespace.no_color` flag or the heading is empty.
+        return super().start_section(heading if namespace.no_color or not heading else self._bold_cyan(heading))
+
+
+# Top-level CLI parser; help is added manually (add_help=False) so `-h/--help`
+# can be placed in the "General options" group below.
+arg_parser = ArgumentParser(
+    usage="\n  datamodel-codegen [options]",
+    description="Generate Python data models from schema definitions or structured data",
+    formatter_class=SortingHelpFormatter,
+    add_help=False,
+)
+
+# Argument groups; SortingHelpFormatter sorts the options inside each group.
+base_options = arg_parser.add_argument_group("Options")
+typing_options = arg_parser.add_argument_group("Typing customization")
+field_options = arg_parser.add_argument_group("Field customization")
+model_options = arg_parser.add_argument_group("Model customization")
+# --allow-extra-fields (deprecated) and --extra-fields must not be combined.
+extra_fields_model_options = model_options.add_mutually_exclusive_group()
+template_options = arg_parser.add_argument_group("Template customization")
+openapi_options = arg_parser.add_argument_group("OpenAPI-only options")
+general_options = arg_parser.add_argument_group("General options")
+
+# ======================================================================================
+# Base options for input/output
+# ======================================================================================
+base_options.add_argument(
+    "--http-headers",
+    nargs="+",
+    metavar="HTTP_HEADER",
+    help='Set headers in HTTP requests to the remote host. (example: "Authorization: Basic dXNlcjpwYXNz")',
+)
+base_options.add_argument(
+    "--http-query-parameters",
+    nargs="+",
+    metavar="HTTP_QUERY_PARAMETERS",
+    help='Set query parameters in HTTP requests to the remote host. (example: "ref=branch")',
+)
+base_options.add_argument(
+    "--http-ignore-tls",
+    help="Disable verification of the remote host's TLS certificate",
+    action="store_true",
+    default=None,
+)
+base_options.add_argument(
+    "--input",
+    help="Input file/directory (default: stdin)",
+)
+base_options.add_argument(
+    "--input-file-type",
+    help="Input file type (default: auto)",
+    choices=[i.value for i in InputFileType],
+)
+base_options.add_argument(
+    "--output",
+    help="Output file (default: stdout)",
+)
+base_options.add_argument(
+    "--output-model-type",
+    help="Output model type (default: pydantic.BaseModel)",
+    choices=[i.value for i in DataModelType],
+)
+base_options.add_argument(
+    "--url",
+    help="Input file URL. `--input` is ignored when `--url` is used",
+)
+
+# ======================================================================================
+# Customization options for generated models
+# ======================================================================================
+extra_fields_model_options.add_argument(
+    "--allow-extra-fields",
+    help="Deprecated: Allow passing extra fields. This flag is deprecated. Use `--extra-fields=allow` instead.",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--allow-population-by-field-name",
+    help="Allow population by field name",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--class-name",
+    help="Set class name of root model",
+    default=None,
+)
+model_options.add_argument(
+    "--collapse-root-models",
+    action="store_true",
+    default=None,
+    help="Models generated with a root-type field will be merged into the models using that root-type model",
+)
+model_options.add_argument(
+    "--disable-appending-item-suffix",
+    help="Disable appending `Item` suffix to model name in an array",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--disable-timestamp",
+    help="Disable timestamp on file headers",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--enable-faux-immutability",
+    help="Enable faux immutability",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--enable-version-header",
+    help="Enable package version on file headers",
+    action="store_true",
+    default=None,
+)
+extra_fields_model_options.add_argument(
+    "--extra-fields",
+    help="Set the generated models to allow, forbid, or ignore extra fields.",
+    choices=["allow", "ignore", "forbid"],
+    default=None,
+)
+model_options.add_argument(
+    "--keep-model-order",
+    help="Keep generated models' order",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--keyword-only",
+    help="Defined models as keyword only (for example dataclass(kw_only=True)).",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--frozen-dataclasses",
+    help="Generate frozen dataclasses (dataclass(frozen=True)). Only applies to dataclass output.",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--reuse-model",
+    help="Reuse models on the field when a module has the model with the same content",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--target-python-version",
+    help="target python version",
+    choices=[v.value for v in PythonVersion],
+)
+model_options.add_argument(
+    "--treat-dot-as-module",
+    help="treat dotted module names as modules",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-schema-description",
+    help="Use schema description to populate class docstring",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-title-as-name",
+    help="use titles as class names of models",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-pendulum",
+    help="use pendulum instead of datetime",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-exact-imports",
+    help='import exact types instead of modules, for example: "from .foo import Bar" instead of '
+    '"from . import foo" with "foo.Bar"',
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--output-datetime-class",
+    help="Choose Datetime class between AwareDatetime, NaiveDatetime or datetime. "
+    "Each output model has its default mapping (for example pydantic: datetime, dataclass: str, ...)",
+    choices=[i.value for i in DatetimeClassType],
+    default=None,
+)
+model_options.add_argument(
+    "--parent-scoped-naming",
+    help="Set name of models defined inline from the parent model",
+    action="store_true",
+    default=None,
+)
+
+# ======================================================================================
+# Typing options for generated models
+# ======================================================================================
+typing_options.add_argument(
+    "--base-class",
+    help="Base Class (default: pydantic.BaseModel)",
+    type=str,
+)
+typing_options.add_argument(
+    "--enum-field-as-literal",
+    help="Parse enum field as literal. "
+    "all: all enum field type are Literal. "
+    "one: field type is Literal when an enum has only one possible value",
+    choices=[lt.value for lt in LiteralType],
+    default=None,
+)
+typing_options.add_argument(
+    "--field-constraints",
+    help="Use field constraints and not con* annotations",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--set-default-enum-member",
+    help="Set enum members as default values for enum field",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--strict-types",
+    help="Use strict types",
+    choices=[t.value for t in StrictTypes],
+    nargs="+",
+)
+typing_options.add_argument(
+    "--use-annotated",
+    help="Use typing.Annotated for Field(). Also, `--field-constraints` option will be enabled.",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-generic-container-types",
+    help="Use generic container types for type hinting (typing.Sequence, typing.Mapping). "
+    "If `--use-standard-collections` option is set, then import from collections.abc instead of typing",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-non-positive-negative-number-constrained-types",
+    help="Use the Non{Positive,Negative}{FloatInt} types instead of the corresponding con* constrained types.",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-one-literal-as-default",
+    help="Use one literal as default value for one literal field",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-standard-collections",
+    help="Use standard collections for type hinting (list, dict)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-subclass-enum",
+    help="Define Enum class as subclass with field type when enum has type (int, float, bytes, str)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-union-operator",
+    help="Use | operator for Union type (PEP 604).",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-unique-items-as-set",
+    help="define field type as `set` when the field attribute has `uniqueItems`",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--disable-future-imports",
+    help="Disable __future__ imports",
+    action="store_true",
+    default=None,
+)
+
+# ======================================================================================
+# Customization options for generated model fields
+# ======================================================================================
+field_options.add_argument(
+    "--capitalise-enum-members",
+    "--capitalize-enum-members",
+    help="Capitalize field names on enum",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--empty-enum-field-name",
+    help="Set field name when enum value is empty (default:  `_`)",
+    default=None,
+)
+field_options.add_argument(
+    "--field-extra-keys",
+    help="Add extra keys to field parameters",
+    type=str,
+    nargs="+",
+)
+field_options.add_argument(
+    "--field-extra-keys-without-x-prefix",
+    help="Add extra keys with `x-` prefix to field parameters. The extra keys are stripped of the `x-` prefix.",
+    type=str,
+    nargs="+",
+)
+field_options.add_argument(
+    "--field-include-all-keys",
+    help="Add all keys to field parameters",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--force-optional",
+    help="Force optional for required fields",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--original-field-name-delimiter",
+    help="Set delimiter to convert to snake case. This option only can be used with --snake-case-field (default: `_` )",
+    default=None,
+)
+field_options.add_argument(
+    "--remove-special-field-name-prefix",
+    help="Remove field name prefix if it has a special meaning e.g. underscores",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--snake-case-field",
+    help="Change camel-case field name to snake-case",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--special-field-name-prefix",
+    help="Set field name prefix when first character can't be used as Python field name (default:  `field`)",
+    default=None,
+)
+field_options.add_argument(
+    "--strip-default-none",
+    help="Strip default None on fields",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-default",
+    help="Use default value even if a field is required",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-default-kwarg",
+    action="store_true",
+    help="Use `default=` instead of a positional argument for Fields that have default values.",
+    default=None,
+)
+field_options.add_argument(
+    "--use-field-description",
+    help="Use schema description to populate field docstring",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--union-mode",
+    help="Union mode for only pydantic v2 field",
+    choices=[u.value for u in UnionMode],
+    default=None,
+)
+field_options.add_argument(
+    "--no-alias",
+    help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an
+            alias_generator""",
+    action="store_true",
+    default=None,
+)
+
+# ======================================================================================
+# Options for templating output
+# ======================================================================================
+template_options.add_argument(
+    "--aliases",
+    help="Alias mapping file",
+    type=FileType("rt"),
+)
+template_options.add_argument(
+    "--custom-file-header",
+    help="Custom file header",
+    type=str,
+    default=None,
+)
+template_options.add_argument(
+    "--custom-file-header-path",
+    help="Custom file header file path",
+    default=None,
+    type=str,
+)
+template_options.add_argument(
+    "--custom-template-dir",
+    help="Custom template directory",
+    type=str,
+)
+template_options.add_argument(
+    "--encoding",
+    help=f"The encoding of input and output (default: {DEFAULT_ENCODING})",
+    default=None,
+)
+template_options.add_argument(
+    "--extra-template-data",
+    help="Extra template data for output models. Input is supposed to be a json/yaml file. "
+    "For OpenAPI and Jsonschema the keys are the spec path of the object, or the name of the object if you want to "
+    "apply the template data to multiple objects with the same name. "
+    "If you are using another input file type (e.g. GraphQL), the key is the name of the object. "
+    "The value is a dictionary of the template data to add.",
+    type=FileType("rt"),
+)
+template_options.add_argument(
+    "--use-double-quotes",
+    action="store_true",
+    default=None,
+    help="Model generated with double quotes. Single quotes or "
+    "your black config skip_string_normalization value will be used without this option.",
+)
+template_options.add_argument(
+    "--wrap-string-literal",
+    help="Wrap string literal by using black `experimental-string-processing` option (require black 20.8b0 or later)",
+    action="store_true",
+    default=None,
+)
+base_options.add_argument(
+    "--additional-imports",
+    help='Custom imports for output (delimited list input). For example "datetime.date,datetime.datetime"',
+    type=str,
+    default=None,
+)
+base_options.add_argument(
+    "--formatters",
+    help="Formatters for output (default: [black, isort])",
+    choices=[f.value for f in Formatter],
+    nargs="+",
+    default=None,
+)
+base_options.add_argument(
+    "--custom-formatters",
+    help="List of modules with custom formatter (delimited list input).",
+    type=str,
+    default=None,
+)
+template_options.add_argument(
+    "--custom-formatters-kwargs",
+    help="A file with kwargs for custom formatters.",
+    type=FileType("rt"),
+)
+
+# ======================================================================================
+# Options specific to OpenAPI input schemas
+# ======================================================================================
+openapi_options.add_argument(
+    "--openapi-scopes",
+    help="Scopes of OpenAPI model generation (default: schemas)",
+    choices=[o.value for o in OpenAPIScope],
+    nargs="+",
+    default=None,
+)
+openapi_options.add_argument(
+    "--strict-nullable",
+    help="Treat default field as a non-nullable field (Only OpenAPI)",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--use-operation-id-as-name",
+    help="use operation id of OpenAPI as class names of models",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--include-path-parameters",
+    help="Include path parameters in generated parameter models in addition to query parameters (Only OpenAPI)",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--validation",
+    help="Deprecated: Enable validation (Only OpenAPI). this option is deprecated. it will be removed in future "
+    "releases",
+    action="store_true",
+    default=None,
+)
+
+# ======================================================================================
+# General options
+# ======================================================================================
+general_options.add_argument(
+    "--debug",
+    help="show debug message (require \"debug\". `$ pip install 'datamodel-code-generator[debug]'`)",
+    action="store_true",
+    default=None,
+)
+general_options.add_argument(
+    "--disable-warnings",
+    help="disable warnings",
+    action="store_true",
+    default=None,
+)
+general_options.add_argument(
+    "-h",
+    "--help",
+    action="help",
+    default="==SUPPRESS==",
+    help="show this help message and exit",
+)
+general_options.add_argument(
+    "--no-color",
+    action="store_true",
+    default=False,
+    help="disable colorized output",
+)
+general_options.add_argument(
+    "--version",
+    action="store_true",
+    help="show version",
+)
+
+__all__ = [
+    "DEFAULT_ENCODING",
+    "arg_parser",
+    "namespace",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/format.py 0.34.0-1/src/datamodel_code_generator/format.py
--- 0.26.4-3/src/datamodel_code_generator/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,266 @@
+from __future__ import annotations
+
+import subprocess  # noqa: S404
+from enum import Enum
+from functools import cached_property
+from importlib import import_module
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+from warnings import warn
+
+import black
+import isort
+
+from datamodel_code_generator.util import load_toml
+
+try:
+    import black.mode
+except ImportError:  # pragma: no cover
+    black.mode = None
+
+
+class DatetimeClassType(Enum):
+    """Datetime class emitted for date-time fields (`--output-datetime-class`)."""
+
+    Datetime = "datetime"
+    Awaredatetime = "AwareDatetime"
+    Naivedatetime = "NaiveDatetime"
+
+
+class PythonVersion(Enum):
+    """Supported target Python versions for the generated code."""
+
+    PY_39 = "3.9"
+    PY_310 = "3.10"
+    PY_311 = "3.11"
+    PY_312 = "3.12"
+    PY_313 = "3.13"
+
+    @cached_property
+    def _is_py_310_or_later(self) -> bool:  # pragma: no cover
+        # Every supported version other than 3.9 is 3.10+.
+        return self.value != self.PY_39.value
+
+    @cached_property
+    def _is_py_311_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_39.value, self.PY_310.value}
+
+    @property
+    def has_union_operator(self) -> bool:  # pragma: no cover
+        # PEP 604 `X | Y` union syntax exists from Python 3.10.
+        return self._is_py_310_or_later
+
+    @property
+    def has_typed_dict_non_required(self) -> bool:
+        # typing.NotRequired for TypedDict exists from Python 3.11.
+        return self._is_py_311_or_later
+
+    @property
+    def has_kw_only_dataclass(self) -> bool:
+        # dataclass(kw_only=True) exists from Python 3.10.
+        return self._is_py_310_or_later
+
+
+PythonVersionMin = PythonVersion.PY_39
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+    class _TargetVersion(Enum): ...
+
+    BLACK_PYTHON_VERSION: dict[PythonVersion, _TargetVersion]
+else:
+    BLACK_PYTHON_VERSION: dict[PythonVersion, black.TargetVersion] = {
+        v: getattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+        for v in PythonVersion
+        if hasattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+    }
+
+
+def is_supported_in_black(python_version: PythonVersion) -> bool:  # pragma: no cover
+    """Return True if the installed black release can target ``python_version``."""
+    return python_version in BLACK_PYTHON_VERSION
+
+
+def black_find_project_root(sources: Sequence[Path]) -> Path:
+    """Locate the project root via black's ``find_project_root``, across black versions.
+
+    Newer black returns a ``(Path, reason)`` tuple while older releases return a
+    bare ``Path``; both shapes are normalized to a single ``Path`` here.
+    """
+    if TYPE_CHECKING:
+        from collections.abc import Iterable  # noqa: PLC0415
+
+        def _find_project_root(
+            srcs: Sequence[str] | Iterable[str],
+        ) -> tuple[Path, str] | Path: ...
+
+    else:
+        from black import find_project_root as _find_project_root  # noqa: PLC0415
+    project_root = _find_project_root(tuple(str(s) for s in sources))
+    if isinstance(project_root, tuple):
+        return project_root[0]
+    # pragma: no cover
+    return project_root
+
+
+class Formatter(Enum):
+    """Formatters that can be applied to generated output (`--formatters`)."""
+
+    BLACK = "black"
+    ISORT = "isort"
+    RUFF_CHECK = "ruff-check"
+    RUFF_FORMAT = "ruff-format"
+
+
+DEFAULT_FORMATTERS = [Formatter.BLACK, Formatter.ISORT]
+
+
+class CodeFormatter:
+    """Applies the configured chain of formatters (isort, black, ruff, custom) to generated code."""
+
+    def __init__(  # noqa: PLR0912, PLR0913, PLR0917
+        self,
+        python_version: PythonVersion,
+        settings_path: Path | None = None,
+        wrap_string_literal: bool | None = None,  # noqa: FBT001
+        skip_string_normalization: bool = True,  # noqa: FBT001, FBT002
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        encoding: str = "utf-8",
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+    ) -> None:
+        if not settings_path:
+            settings_path = Path.cwd()
+
+        # Read [tool.black] settings from the project's pyproject.toml, if one exists.
+        root = black_find_project_root((settings_path,))
+        path = root / "pyproject.toml"
+        if path.is_file():
+            pyproject_toml = load_toml(path)
+            config = pyproject_toml.get("tool", {}).get("black", {})
+        else:
+            config = {}
+
+        black_kwargs: dict[str, Any] = {}
+        # `--wrap-string-literal` wins; otherwise fall back to the black config,
+        # whose relevant keys changed in black 24.1.0.
+        if wrap_string_literal is not None:
+            experimental_string_processing = wrap_string_literal
+        elif black.__version__ < "24.1.0":
+            experimental_string_processing = config.get("experimental-string-processing")
+        else:
+            experimental_string_processing = config.get("preview", False) and (  # pragma: no cover
+                config.get("unstable", False) or "string_processing" in config.get("enable-unstable-feature", [])
+            )
+
+        if experimental_string_processing is not None:  # pragma: no cover
+            if black.__version__.startswith("19."):
+                warn(
+                    f"black doesn't support `experimental-string-processing` option"
+                    f" for wrapping string literal in {black.__version__}",
+                    stacklevel=2,
+                )
+            elif black.__version__ < "24.1.0":
+                black_kwargs["experimental_string_processing"] = experimental_string_processing
+            elif experimental_string_processing:
+                # black >= 24.1.0 exposes string processing as an unstable preview feature.
+                black_kwargs["preview"] = True
+                black_kwargs["unstable"] = config.get("unstable", False)
+                black_kwargs["enabled_features"] = {black.mode.Preview.string_processing}
+
+        if TYPE_CHECKING:
+            self.black_mode: black.FileMode
+        else:
+            self.black_mode = black.FileMode(
+                target_versions={BLACK_PYTHON_VERSION[python_version]},
+                line_length=config.get("line-length", black.DEFAULT_LINE_LENGTH),
+                string_normalization=not skip_string_normalization or not config.get("skip-string-normalization", True),
+                **black_kwargs,
+            )
+
+        self.settings_path: str = str(settings_path)
+
+        self.isort_config_kwargs: dict[str, Any] = {}
+        if known_third_party:
+            self.isort_config_kwargs["known_third_party"] = known_third_party
+
+        # isort 4.x has no Config object; apply_isort below selects the matching API.
+        if isort.__version__.startswith("4."):
+            self.isort_config = None
+        else:
+            self.isort_config = isort.Config(settings_path=self.settings_path, **self.isort_config_kwargs)
+
+        self.custom_formatters_kwargs = custom_formatters_kwargs or {}
+        self.custom_formatters = self._check_custom_formatters(custom_formatters)
+        self.encoding = encoding
+        self.formatters = formatters
+
+    def _load_custom_formatter(self, custom_formatter_import: str) -> CustomCodeFormatter:
+        """Import a custom formatter module and instantiate its ``CodeFormatter`` class.
+
+        Raises:
+            NameError: if the module defines no attribute named ``CodeFormatter``.
+            TypeError: if that attribute is not a ``CustomCodeFormatter`` subclass.
+        """
+        import_ = import_module(custom_formatter_import)
+
+        if not hasattr(import_, "CodeFormatter"):
+            msg = f"Custom formatter module `{import_.__name__}` must contains object with name CodeFormatter"
+            raise NameError(msg)
+
+        formatter_class = import_.__getattribute__("CodeFormatter")  # noqa: PLC2801
+
+        if not issubclass(formatter_class, CustomCodeFormatter):
+            msg = f"The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`"
+            raise TypeError(msg)
+
+        return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
+
+    def _check_custom_formatters(self, custom_formatters: list[str] | None) -> list[CustomCodeFormatter]:
+        # Load every requested custom formatter module; empty list when none requested.
+        if custom_formatters is None:
+            return []
+
+        return [self._load_custom_formatter(custom_formatter_import) for custom_formatter_import in custom_formatters]
+
+    def format_code(
+        self,
+        code: str,
+    ) -> str:
+        """Run the enabled formatters over ``code`` in a fixed order, then any custom formatters."""
+        if Formatter.ISORT in self.formatters:
+            code = self.apply_isort(code)
+        if Formatter.BLACK in self.formatters:
+            code = self.apply_black(code)
+
+        if Formatter.RUFF_CHECK in self.formatters:
+            code = self.apply_ruff_lint(code)
+
+        if Formatter.RUFF_FORMAT in self.formatters:
+            code = self.apply_ruff_formatter(code)
+
+        for formatter in self.custom_formatters:
+            code = formatter.apply(code)
+
+        return code
+
+    def apply_black(self, code: str) -> str:
+        """Format ``code`` with black using the mode computed in ``__init__``."""
+        return black.format_str(
+            code,
+            mode=self.black_mode,
+        )
+
+    def apply_ruff_lint(self, code: str) -> str:
+        # Pipe the code through `ruff check --fix` via stdin; check=False means a
+        # nonzero ruff exit is ignored and whatever ruff wrote to stdout is returned.
+        result = subprocess.run(
+            ("ruff", "check", "--fix", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+        )
+        return result.stdout.decode(self.encoding)
+
+    def apply_ruff_formatter(self, code: str) -> str:
+        # Pipe the code through `ruff format` via stdin; nonzero exits are ignored
+        # the same way as in apply_ruff_lint.
+        result = subprocess.run(
+            ("ruff", "format", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+        )
+        return result.stdout.decode(self.encoding)
+
+    if TYPE_CHECKING:
+
+        def apply_isort(self, code: str) -> str: ...
+
+    elif isort.__version__.startswith("4."):
+
+        # isort 4.x API: SortImports object exposing the result via `.output`.
+        def apply_isort(self, code: str) -> str:
+            return isort.SortImports(
+                file_contents=code,
+                settings_path=self.settings_path,
+                **self.isort_config_kwargs,
+            ).output
+
+    else:
+
+        # isort 5+ API: isort.code() with a Config built in __init__.
+        def apply_isort(self, code: str) -> str:
+            return isort.code(code, config=self.isort_config)
+
+
+class CustomCodeFormatter:
+    """Base class for user-supplied formatters (`--custom-formatters`).
+
+    A custom formatter module must expose a subclass of this class under the
+    attribute name ``CodeFormatter`` and override :meth:`apply`.
+    """
+
+    def __init__(self, formatter_kwargs: dict[str, Any]) -> None:
+        # Arbitrary options forwarded from `--custom-formatters-kwargs`.
+        self.formatter_kwargs = formatter_kwargs
+
+    def apply(self, code: str) -> str:
+        """Transform ``code`` and return the result; must be overridden."""
+        raise NotImplementedError
diff -pruN 0.26.4-3/src/datamodel_code_generator/http.py 0.34.0-1/src/datamodel_code_generator/http.py
--- 0.26.4-3/src/datamodel_code_generator/http.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/http.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+try:
+    import httpx
+except ImportError as exc:  # pragma: no cover
+    msg = "Please run `$pip install 'datamodel-code-generator[http]`' to resolve URL Reference"
+    raise Exception(msg) from exc  # noqa: TRY002
+
+
+def get_body(
+    url: str,
+    headers: Sequence[tuple[str, str]] | None = None,
+    ignore_tls: bool = False,  # noqa: FBT001, FBT002
+    query_parameters: Sequence[tuple[str, str]] | None = None,
+) -> str:
+    """Fetch ``url`` over HTTP(S) and return the response body as text.
+
+    Redirects are followed; TLS certificate verification is disabled when
+    ``ignore_tls`` is True.
+    """
+    return httpx.get(
+        url,
+        headers=headers,
+        verify=not ignore_tls,
+        follow_redirects=True,
+        params=query_parameters,  # pyright: ignore[reportArgumentType]
+        # TODO: Improve params type
+    ).text
+
+
+def join_url(url: str, ref: str = ".") -> str:
+    """Join ``ref`` against ``url`` using httpx's URL joining and return the result."""
+    return str(httpx.URL(url).join(ref))
diff -pruN 0.26.4-3/src/datamodel_code_generator/imports.py 0.34.0-1/src/datamodel_code_generator/imports.py
--- 0.26.4-3/src/datamodel_code_generator/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,121 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from functools import lru_cache
+from itertools import starmap
+from typing import TYPE_CHECKING, Optional
+
+from datamodel_code_generator.util import BaseModel
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+class Import(BaseModel):
+    """One import statement: ``from from_ import import_ [as alias]`` (``from_`` may be None)."""
+
+    from_: Optional[str] = None  # noqa: UP045
+    import_: str
+    alias: Optional[str] = None  # noqa: UP045
+    reference_path: Optional[str] = None  # noqa: UP045
+
+    @classmethod
+    @lru_cache
+    def from_full_path(cls, class_path: str) -> Import:
+        """Split a dotted path into module and attribute, e.g. "typing.Any" -> from typing import Any.
+
+        A path with no dot produces a plain import (``from_`` is None). Results
+        are cached per path.
+        """
+        split_class_path: list[str] = class_path.split(".")
+        return Import(from_=".".join(split_class_path[:-1]) or None, import_=split_class_path[-1])
+
+
+class Imports(defaultdict[Optional[str], set[str]]):
+    """Collects import statements keyed by source module (None for plain ``import x`` lines)."""
+
+    def __str__(self) -> str:
+        return self.dump()
+
+    def __init__(self, use_exact: bool = False) -> None:  # noqa: FBT001, FBT002
+        super().__init__(set)
+        # alias[from_][name] -> alias used when rendering "name as alias".
+        self.alias: defaultdict[str | None, dict[str, str]] = defaultdict(dict)
+        # Reference count per (from_, name) so remove() drops an import only
+        # when its last user is gone.
+        self.counter: dict[tuple[str | None, str], int] = defaultdict(int)
+        # Maps a reference path back to its Import for remove_referenced_imports().
+        self.reference_paths: dict[str, Import] = {}
+        self.use_exact: bool = use_exact
+
+    def _set_alias(self, from_: str | None, imports: set[str]) -> list[str]:
+        # Render each name sorted, appending " as <alias>" when an alias exists
+        # and differs from the name itself.
+        return [
+            f"{i} as {self.alias[from_][i]}" if i in self.alias[from_] and i != self.alias[from_][i] else i
+            for i in sorted(imports)
+        ]
+
+    def create_line(self, from_: str | None, imports: set[str]) -> str:
+        # `from x import a, b` for module imports; one `import x` line each otherwise.
+        if from_:
+            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
+        return "\n".join(f"import {i}" for i in self._set_alias(from_, imports))
+
+    def dump(self) -> str:
+        """Render all collected imports as newline-separated import statements."""
+        return "\n".join(starmap(self.create_line, self.items()))
+
+    def append(self, imports: Import | Iterable[Import] | None) -> None:
+        """Add one or more imports, tracking aliases, reference paths, and use counts."""
+        if imports:
+            if isinstance(imports, Import):
+                imports = [imports]
+            for import_ in imports:
+                if import_.reference_path:
+                    self.reference_paths[import_.reference_path] = import_
+                # A dotted name is a plain `import a.b` statement (no `from`).
+                if "." in import_.import_:
+                    self[None].add(import_.import_)
+                    self.counter[None, import_.import_] += 1
+                else:
+                    self[import_.from_].add(import_.import_)
+                    self.counter[import_.from_, import_.import_] += 1
+                    if import_.alias:
+                        self.alias[import_.from_][import_.import_] = import_.alias
+
+    def remove(self, imports: Import | Iterable[Import]) -> None:
+        """Decrement use counts and drop imports (and their aliases) that hit zero."""
+        if isinstance(imports, Import):  # pragma: no cover
+            imports = [imports]
+        for import_ in imports:
+            if "." in import_.import_:  # pragma: no cover
+                self.counter[None, import_.import_] -= 1
+                if self.counter[None, import_.import_] == 0:  # pragma: no cover
+                    self[None].remove(import_.import_)
+                    if not self[None]:
+                        del self[None]
+            else:
+                self.counter[import_.from_, import_.import_] -= 1  # pragma: no cover
+                if self.counter[import_.from_, import_.import_] == 0:  # pragma: no cover
+                    self[import_.from_].remove(import_.import_)
+                    if not self[import_.from_]:
+                        del self[import_.from_]
+                    if import_.alias:  # pragma: no cover
+                        del self.alias[import_.from_][import_.import_]
+                        if not self.alias[import_.from_]:
+                            del self.alias[import_.from_]
+
+    def remove_referenced_imports(self, reference_path: str) -> None:
+        """Remove the import previously registered for ``reference_path``, if any."""
+        if reference_path in self.reference_paths:
+            self.remove(self.reference_paths[reference_path])
+
+
+# Pre-built Import instances for the type imports most commonly emitted
+# by the code generator.
+IMPORT_ANNOTATED = Import.from_full_path("typing.Annotated")
+IMPORT_ANY = Import.from_full_path("typing.Any")
+IMPORT_LIST = Import.from_full_path("typing.List")
+IMPORT_SET = Import.from_full_path("typing.Set")
+IMPORT_UNION = Import.from_full_path("typing.Union")
+IMPORT_OPTIONAL = Import.from_full_path("typing.Optional")
+IMPORT_LITERAL = Import.from_full_path("typing.Literal")
+IMPORT_TYPE_ALIAS = Import.from_full_path("typing.TypeAlias")
+IMPORT_SEQUENCE = Import.from_full_path("typing.Sequence")
+IMPORT_FROZEN_SET = Import.from_full_path("typing.FrozenSet")
+IMPORT_MAPPING = Import.from_full_path("typing.Mapping")
+IMPORT_ABC_SEQUENCE = Import.from_full_path("collections.abc.Sequence")
+IMPORT_ABC_SET = Import.from_full_path("collections.abc.Set")
+IMPORT_ABC_MAPPING = Import.from_full_path("collections.abc.Mapping")
+IMPORT_ENUM = Import.from_full_path("enum.Enum")
+IMPORT_ANNOTATIONS = Import.from_full_path("__future__.annotations")
+IMPORT_DICT = Import.from_full_path("typing.Dict")
+IMPORT_DECIMAL = Import.from_full_path("decimal.Decimal")
+IMPORT_DATE = Import.from_full_path("datetime.date")
+IMPORT_DATETIME = Import.from_full_path("datetime.datetime")
+IMPORT_TIMEDELTA = Import.from_full_path("datetime.timedelta")
+IMPORT_PATH = Import.from_full_path("pathlib.Path")
+IMPORT_TIME = Import.from_full_path("datetime.time")
+IMPORT_UUID = Import.from_full_path("uuid.UUID")
+IMPORT_PENDULUM_DATE = Import.from_full_path("pendulum.Date")
+IMPORT_PENDULUM_DATETIME = Import.from_full_path("pendulum.DateTime")
+IMPORT_PENDULUM_DURATION = Import.from_full_path("pendulum.Duration")
+IMPORT_PENDULUM_TIME = Import.from_full_path("pendulum.Time")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/__init__.py 0.34.0-1/src/datamodel_code_generator/model/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import sys
+from typing import TYPE_CHECKING, Callable, NamedTuple
+
+from datamodel_code_generator import PythonVersion
+
+from .base import ConstraintsBase, DataModel, DataModelFieldBase
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+    from datamodel_code_generator import DataModelType
+    from datamodel_code_generator.types import DataTypeManager as DataTypeManagerABC
+
+DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(f"{sys.version_info.major}.{sys.version_info.minor}")  # default target: the running interpreter's own version
+
+
+class DataModelSet(NamedTuple):  # bundle of classes that together define one output model flavor
+    data_model: type[DataModel]
+    root_model: type[DataModel]
+    field_model: type[DataModelFieldBase]
+    data_type_manager: type[DataTypeManagerABC]
+    dump_resolve_reference_action: Callable[[Iterable[str]], str] | None  # hook fed model names to render trailing source; None for model types that need none
+    known_third_party: list[str] | None = None  # e.g. ["msgspec"]; NOTE(review): presumably feeds import sorting — confirm
+
+
+def get_data_model_types(  # select the DataModelSet matching the requested output model type
+    data_model_type: DataModelType,
+    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
+) -> DataModelSet:
+    from datamodel_code_generator import DataModelType  # noqa: PLC0415
+
+    from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict  # noqa: PLC0415
+    from .types import DataTypeManager  # noqa: PLC0415
+
+    if data_model_type == DataModelType.PydanticBaseModel:
+        return DataModelSet(
+            data_model=pydantic.BaseModel,
+            root_model=pydantic.CustomRootType,
+            field_model=pydantic.DataModelField,
+            data_type_manager=pydantic.DataTypeManager,
+            dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
+        )
+    if data_model_type == DataModelType.PydanticV2BaseModel:
+        return DataModelSet(
+            data_model=pydantic_v2.BaseModel,
+            root_model=pydantic_v2.RootModel,
+            field_model=pydantic_v2.DataModelField,
+            data_type_manager=pydantic_v2.DataTypeManager,
+            dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
+        )
+    if data_model_type == DataModelType.DataclassesDataclass:
+        return DataModelSet(
+            data_model=dataclass.DataClass,
+            root_model=rootmodel.RootModel,
+            field_model=dataclass.DataModelField,
+            data_type_manager=dataclass.DataTypeManager,
+            dump_resolve_reference_action=None,
+        )
+    if data_model_type == DataModelType.TypingTypedDict:
+        return DataModelSet(
+            data_model=typed_dict.TypedDict,
+            root_model=rootmodel.RootModel,
+            field_model=(
+                typed_dict.DataModelField
+                if target_python_version.has_typed_dict_non_required
+                else typed_dict.DataModelFieldBackport  # typing_extensions backport for targets lacking NotRequired
+            ),
+            data_type_manager=DataTypeManager,
+            dump_resolve_reference_action=None,
+        )
+    if data_model_type == DataModelType.MsgspecStruct:
+        return DataModelSet(
+            data_model=msgspec.Struct,
+            root_model=msgspec.RootModel,
+            field_model=msgspec.DataModelField,
+            data_type_manager=msgspec.DataTypeManager,
+            dump_resolve_reference_action=None,
+            known_third_party=["msgspec"],
+        )
+    msg = f"{data_model_type} is unsupported data model type"
+    raise ValueError(msg)  # pragma: no cover
+
+
+__all__ = ["ConstraintsBase", "DataModel", "DataModelFieldBase"]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/base.py 0.34.0-1/src/datamodel_code_generator/model/base.py
--- 0.26.4-3/src/datamodel_code_generator/model/base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,448 @@
+from __future__ import annotations
+
+import re
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from copy import deepcopy
+from functools import cached_property, lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar
+from warnings import warn
+
+from jinja2 import Environment, FileSystemLoader, Template
+from pydantic import Field
+
+from datamodel_code_generator.imports import (
+    IMPORT_ANNOTATED,
+    IMPORT_OPTIONAL,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.reference import Reference, _BaseModel
+from datamodel_code_generator.types import (
+    ANY,
+    NONE,
+    UNION_PREFIX,
+    DataType,
+    Nullable,
+    chain_as_tuple,
+    get_optional_type,
+)
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+TEMPLATE_DIR: Path = Path(__file__).parents[0] / "template"  # built-in jinja templates live next to this module
+
+ALL_MODEL: str = "#all#"  # extra-template-data key whose values apply to every model
+
+ConstraintsBaseT = TypeVar("ConstraintsBaseT", bound="ConstraintsBase")
+
+
+class ConstraintsBase(_BaseModel):  # base model for JSON-Schema style validation constraints
+    unique_items: Optional[bool] = Field(None, alias="uniqueItems")  # noqa: UP045
+    _exclude_fields: ClassVar[set[str]] = {"has_constraints"}  # cached property below must not be serialized
+    if PYDANTIC_V2:
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
+            arbitrary_types_allowed=True, ignored_types=(cached_property,)
+        )
+    else:
+
+        class Config:
+            arbitrary_types_allowed = True
+            keep_untouched = (cached_property,)
+
+    @cached_property
+    def has_constraints(self) -> bool:  # True when any constraint field was explicitly set
+        return any(v is not None for v in self.dict().values())
+
+    @staticmethod
+    def merge_constraints(a: ConstraintsBaseT, b: ConstraintsBaseT) -> ConstraintsBaseT | None:  # combine two constraint sets; None if neither is a ConstraintsBase
+        constraints_class = None
+        if isinstance(a, ConstraintsBase):  # pragma: no cover
+            root_type_field_constraints = {k: v for k, v in a.dict(by_alias=True).items() if v is not None}
+            constraints_class = a.__class__
+        else:
+            root_type_field_constraints = {}  # pragma: no cover
+
+        if isinstance(b, ConstraintsBase):  # pragma: no cover
+            model_field_constraints = {k: v for k, v in b.dict(by_alias=True).items() if v is not None}
+            constraints_class = constraints_class or b.__class__
+        else:
+            model_field_constraints = {}
+
+        if constraints_class is None or not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
+            return None
+
+        return constraints_class.parse_obj({
+            **root_type_field_constraints,
+            **model_field_constraints,  # b's values win on key collisions
+        })
+
+
+class DataModelFieldBase(_BaseModel):  # one field of a generated model, rendered by the jinja templates
+    name: Optional[str] = None  # noqa: UP045
+    default: Optional[Any] = None  # noqa: UP045
+    required: bool = False
+    alias: Optional[str] = None  # noqa: UP045
+    data_type: DataType
+    constraints: Any = None
+    strip_default_none: bool = False
+    nullable: Optional[bool] = None  # noqa: UP045
+    parent: Optional[Any] = None  # noqa: UP045
+    extras: dict[str, Any] = {}  # noqa: RUF012
+    use_annotated: bool = False
+    has_default: bool = False
+    use_field_description: bool = False
+    const: bool = False
+    original_name: Optional[str] = None  # noqa: UP045
+    use_default_kwarg: bool = False
+    use_one_literal_as_default: bool = False
+    _exclude_fields: ClassVar[set[str]] = {"parent"}
+    _pass_fields: ClassVar[set[str]] = {"parent", "data_type"}
+    can_have_extra_keys: ClassVar[bool] = True
+    type_has_null: Optional[bool] = None  # noqa: UP045
+
+    if not TYPE_CHECKING:  # runtime-only __init__ wiring, hidden from type checkers
+
+        def __init__(self, **data: Any) -> None:
+            super().__init__(**data)
+            if self.data_type.reference or self.data_type.data_types:
+                self.data_type.parent = self  # let nested DataTypes walk back to their owning field
+            self.process_const()
+
+    def process_const(self) -> None:  # JSON-Schema "const": pin the value as a fixed, non-required default
+        if "const" not in self.extras:
+            return
+        self.default = self.extras["const"]
+        self.const = True
+        self.required = False
+        self.nullable = False
+
+    @property
+    def type_hint(self) -> str:  # noqa: PLR0911
+        type_hint = self.data_type.type_hint
+
+        if not type_hint:
+            return NONE
+        if self.has_default_factory or (self.data_type.is_optional and self.data_type.type != ANY):  # factory-defaulted or already-Optional: no extra wrapping
+            return type_hint
+        if self.nullable is not None:  # explicit nullability wins over required/optional inference
+            if self.nullable:
+                return get_optional_type(type_hint, self.data_type.use_union_operator)
+            return type_hint
+        if self.required:
+            if self.type_has_null:  # the type's union explicitly contains null
+                return get_optional_type(type_hint, self.data_type.use_union_operator)
+            return type_hint
+        if self.fall_back_to_nullable:  # non-required fields default to Optional[...]
+            return get_optional_type(type_hint, self.data_type.use_union_operator)
+        return type_hint
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        type_hint = self.type_hint
+        has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint  # does Union survive in the final hint?
+        imports: list[tuple[Import] | Iterator[Import]] = [
+            iter(i for i in self.data_type.all_imports if not (not has_union and i == IMPORT_UNION))  # drop typing.Union if the rendered hint no longer uses it
+        ]
+
+        if self.fall_back_to_nullable:
+            if (
+                self.nullable or (self.nullable is None and not self.required)
+            ) and not self.data_type.use_union_operator:
+                imports.append((IMPORT_OPTIONAL,))
+        elif self.nullable and not self.data_type.use_union_operator:  # pragma: no cover
+            imports.append((IMPORT_OPTIONAL,))
+        if self.use_annotated and self.annotated:
+            imports.append((IMPORT_ANNOTATED,))
+        return chain_as_tuple(*imports)
+
+    @property
+    def docstring(self) -> str | None:  # schema description, surfaced only when configured
+        if self.use_field_description:
+            description = self.extras.get("description", None)
+            if description is not None:
+                return f"{description}"
+        return None
+
+    @property
+    def unresolved_types(self) -> frozenset[str]:
+        return self.data_type.unresolved_types
+
+    @property
+    def field(self) -> str | None:
+        """for backwards compatibility"""
+        return None
+
+    @property
+    def method(self) -> str | None:
+        return None
+
+    @property
+    def represented_default(self) -> str:  # repr() form used when rendering defaults
+        return repr(self.default)
+
+    @property
+    def annotated(self) -> str | None:  # subclasses return the Annotated[...] metadata expression
+        return None
+
+    @property
+    def has_default_factory(self) -> bool:
+        return "default_factory" in self.extras
+
+    @property
+    def fall_back_to_nullable(self) -> bool:
+        return True  # subclasses may override to disable implicit Optional wrapping
+
+
+@lru_cache
+def get_template(template_file_path: Path) -> Template:  # compile (and cache) a jinja template rooted at TEMPLATE_DIR
+    loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
+    environment: Environment = Environment(loader=loader)  # noqa: S701
+    return environment.get_template(template_file_path.name)
+
+
+def sanitize_module_name(name: str, *, treat_dot_as_module: bool) -> str:  # make an arbitrary string usable as a Python module name
+    pattern = r"[^0-9a-zA-Z_.]" if treat_dot_as_module else r"[^0-9a-zA-Z_]"  # keep dots only when they denote package separators
+    sanitized = re.sub(pattern, "_", name)
+    if sanitized and sanitized[0].isdigit():  # module names cannot start with a digit
+        sanitized = f"_{sanitized}"
+    return sanitized
+
+
+def get_module_path(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> list[str]:  # package path components for a model, excluding the final class name
+    if file_path:
+        sanitized_stem = sanitize_module_name(file_path.stem, treat_dot_as_module=treat_dot_as_module)
+        return [
+            *file_path.parts[:-1],
+            sanitized_stem,
+            *name.split(".")[:-1],  # any dotted prefix of the model name becomes extra path segments
+        ]
+    return name.split(".")[:-1]
+
+
+def get_module_name(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> str:  # dotted module name built from get_module_path()
+    return ".".join(get_module_path(name, file_path, treat_dot_as_module=treat_dot_as_module))
+
+
+class TemplateBase(ABC):  # anything renderable through a jinja template
+    @cached_property
+    @abstractmethod
+    def template_file_path(self) -> Path:  # path resolved by get_template()
+        raise NotImplementedError
+
+    @cached_property
+    def template(self) -> Template:
+        return get_template(self.template_file_path)
+
+    @abstractmethod
+    def render(self) -> str:
+        raise NotImplementedError
+
+    def _render(self, *args: Any, **kwargs: Any) -> str:  # thin passthrough to Template.render
+        return self.template.render(*args, **kwargs)
+
+    def __str__(self) -> str:  # str(obj) renders the template
+        return self.render()
+
+
+class BaseClassDataType(DataType): ...  # DataType subclass marking base-class (inheritance) references
+
+
+UNDEFINED: Any = object()  # sentinel meaning "no default given", distinct from an explicit None
+
+
+class DataModel(TemplateBase, Nullable, ABC):  # abstract, template-rendered representation of one generated class
+    TEMPLATE_FILE_PATH: ClassVar[str] = ""
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        frozen: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        self.keyword_only = keyword_only
+        self.frozen = frozen
+        if not self.TEMPLATE_FILE_PATH:  # concrete subclasses must declare their template
+            msg = "TEMPLATE_FILE_PATH is undefined"
+            raise Exception(msg)  # noqa: TRY002
+
+        self._custom_template_dir: Path | None = custom_template_dir
+        self.decorators: list[str] = decorators or []
+        self._additional_imports: list[Import] = []
+        self.custom_base_class = custom_base_class
+        if base_classes:
+            self.base_classes: list[BaseClassDataType] = [BaseClassDataType(reference=b) for b in base_classes]
+        else:
+            self.set_base_class()  # fall back to custom_base_class / class-level BASE_CLASS
+
+        self.file_path: Path | None = path
+        self.reference: Reference = reference
+
+        self.reference.source = self  # back-link so the Reference resolves to this model
+
+        if extra_template_data is not None:
+            # The supplied defaultdict will either create a new entry,
+            # or already contain a predefined entry for this type
+            self.extra_template_data = extra_template_data[self.reference.path]
+
+            # We use the full object reference path as dictionary key, but
+            # we still support `name` as key because it was used for
+            # `--extra-template-data` input file and we don't want to break the
+            # existing behavior.
+            self.extra_template_data.update(extra_template_data[self.name])
+        else:
+            self.extra_template_data = defaultdict(dict)
+
+        self.fields = self._validate_fields(fields) if fields else []  # deduplicates by field name
+
+        for base_class in self.base_classes:
+            if base_class.reference:
+                base_class.reference.children.append(self)  # register as a subclass of the referenced model
+
+        if extra_template_data is not None:
+            all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
+            if all_model_extra_template_data:
+                # The deepcopy is needed here to ensure that different models don't
+                # end up inadvertently sharing state (such as "base_class_kwargs")
+                self.extra_template_data.update(deepcopy(all_model_extra_template_data))
+
+        self.methods: list[str] = methods or []
+
+        self.description = description
+        for field in self.fields:
+            field.parent = self  # fields keep a back-reference to their model
+
+        self._additional_imports.extend(self.DEFAULT_IMPORTS)
+        self.default: Any = default
+        self._nullable: bool = nullable
+        self._treat_dot_as_module: bool = treat_dot_as_module
+
+    def _validate_fields(self, fields: list[DataModelFieldBase]) -> list[DataModelFieldBase]:  # drop duplicate-named fields, keeping the first occurrence
+        names: set[str] = set()
+        unique_fields: list[DataModelFieldBase] = []
+        for field in fields:
+            if field.name:
+                if field.name in names:
+                    warn(f"Field name `{field.name}` is duplicated on {self.name}", stacklevel=2)
+                    continue
+                names.add(field.name)
+            unique_fields.append(field)
+        return unique_fields
+
+    def set_base_class(self) -> None:  # derive base classes from custom_base_class or class-level BASE_CLASS
+        base_class = self.custom_base_class or self.BASE_CLASS
+        if not base_class:
+            self.base_classes = []
+            return
+        base_class_import = Import.from_full_path(base_class)
+        self._additional_imports.append(base_class_import)
+        self.base_classes = [BaseClassDataType.from_import(base_class_import)]
+
+    @cached_property
+    def template_file_path(self) -> Path:
+        template_file_path = Path(self.TEMPLATE_FILE_PATH)
+        if self._custom_template_dir is not None:
+            custom_template_file_path = self._custom_template_dir / template_file_path
+            if custom_template_file_path.exists():  # a user-supplied template overrides the built-in one
+                return custom_template_file_path
+        return template_file_path
+
+    @property
+    def imports(self) -> tuple[Import, ...]:  # field imports plus model-level extras
+        return chain_as_tuple(
+            (i for f in self.fields for i in f.imports),
+            self._additional_imports,
+        )
+
+    @property
+    def reference_classes(self) -> frozenset[str]:  # reference paths this model depends on (bases + field types)
+        return frozenset(
+            {r.reference.path for r in self.base_classes if r.reference}
+            | {t for f in self.fields for t in f.unresolved_types}
+        )
+
+    @property
+    def name(self) -> str:
+        return self.reference.name
+
+    @property
+    def duplicate_name(self) -> str:
+        return self.reference.duplicate_name or ""
+
+    @property
+    def base_class(self) -> str:  # comma-joined base list as rendered in the class statement
+        return ", ".join(b.type_hint for b in self.base_classes)
+
+    @staticmethod
+    def _get_class_name(name: str) -> str:  # strip any module prefix
+        if "." in name:
+            return name.rsplit(".", 1)[-1]
+        return name
+
+    @property
+    def class_name(self) -> str:
+        return self._get_class_name(self.name)
+
+    @class_name.setter
+    def class_name(self, class_name: str) -> None:
+        if "." in self.reference.name:  # keep the module prefix, swap only the class part
+            self.reference.name = f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
+        else:
+            self.reference.name = class_name
+
+    @property
+    def duplicate_class_name(self) -> str:
+        return self._get_class_name(self.duplicate_name)
+
+    @property
+    def module_path(self) -> list[str]:
+        return get_module_path(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)
+
+    @property
+    def module_name(self) -> str:
+        return get_module_name(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)
+
+    @property
+    def all_data_types(self) -> Iterator[DataType]:  # every DataType reachable from fields and bases
+        for field in self.fields:
+            yield from field.data_type.all_data_types
+        yield from self.base_classes
+
+    @property
+    def nullable(self) -> bool:
+        return self._nullable
+
+    @cached_property
+    def path(self) -> str:
+        return self.reference.path
+
+    def render(self, *, class_name: str | None = None) -> str:  # render this model's source via its jinja template
+        return self._render(
+            class_name=class_name or self.class_name,
+            fields=self.fields,
+            decorators=self.decorators,
+            base_class=self.base_class,
+            methods=self.methods,
+            description=self.description,
+            keyword_only=self.keyword_only,
+            frozen=self.frozen,
+            **self.extra_template_data,
+        )
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/dataclass.py 0.34.0-1/src/datamodel_code_generator/model/dataclass.py
--- 0.26.4-3/src/datamodel_code_generator/model/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,189 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
+from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.types import type_map_factory
+from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Sequence
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+from datamodel_code_generator.model.pydantic.base_model import Constraints  # noqa: TC001
+
+
+def _has_field_assignment(field: DataModelFieldBase) -> bool:  # True when the rendered field carries "= ..." (a default or field() call)
+    return bool(field.field) or not (
+        field.required or (field.represented_default == "None" and field.strip_default_none)
+    )
+
+
+class DataClass(DataModel):  # @dataclass flavor of DataModel
+    TEMPLATE_FILE_PATH: ClassVar[str] = "dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        frozen: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=sorted(fields, key=_has_field_assignment),  # stable sort puts defaulted fields last, as dataclasses require
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            frozen=frozen,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+
+class DataModelField(DataModelFieldBase):  # field rendered as "name: type" or "name: type = field(...)"
+    _FIELD_KEYS: ClassVar[set[str]] = {  # extras forwarded as dataclasses.field() keyword arguments
+        "default_factory",
+        "init",
+        "repr",
+        "hash",
+        "compare",
+        "metadata",
+        "kw_only",
+    }
+    constraints: Optional[Constraints] = None  # noqa: UP045
+
+    def process_const(self) -> None:  # "const" narrows the type to Literal[value] and sets the default
+        if "const" not in self.extras:
+            return
+        self.const = True
+        self.nullable = False
+        const = self.extras["const"]
+        self.data_type = self.data_type.__class__(literals=[const])
+        if not self.default:
+            self.default = const
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        field = self.field
+        if field and field.startswith("field("):  # rendered as a field() call, so the dataclasses.field import is needed
+            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
+        return super().imports
+
+    def self_reference(self) -> bool:  # pragma: no cover
+        return isinstance(self.parent, DataClass) and self.parent.reference.path in {
+            d.reference.path for d in self.data_type.all_data_types if d.reference
+        }
+
+    @property
+    def field(self) -> str | None:
+        """for backwards compatibility"""
+        result = str(self)
+        if not result:
+            return None
+        return result
+
+    def __str__(self) -> str:  # render the right-hand side of the field assignment ("" when there is none)
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
+
+        if self.default != UNDEFINED and self.default is not None:  # NOTE(review): != on the UNDEFINED sentinel relies on object identity equality
+            data["default"] = self.default
+
+        if self.required:  # required fields never render defaults
+            data = {
+                k: v
+                for k, v in data.items()
+                if k
+                not in {
+                    "default",
+                    "default_factory",
+                }
+            }
+
+        if not data:
+            return ""
+
+        if len(data) == 1 and "default" in data:
+            default = data["default"]
+
+            if isinstance(default, (list, dict)):  # mutable defaults are illegal in dataclasses; wrap in a factory
+                return f"field(default_factory=lambda :{default!r})"
+            return repr(default)
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]  # default_factory is a raw expression, everything else is repr()d
+        return f"field({', '.join(kwargs)})"
+
+
+class DataTypeManager(_DataTypeManager):  # dataclass-specific type mapping
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+        )
+
+        datetime_map = (  # map temporal Types onto real datetime classes
+            {
+                Types.time: self.data_type.from_import(IMPORT_TIME),
+                Types.date: self.data_type.from_import(IMPORT_DATE),
+                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+            }
+            if target_datetime_class is DatetimeClassType.Datetime
+            else {}  # other datetime targets keep the base mapping unchanged
+        )
+
+        self.type_map: dict[Types, DataType] = {
+            **type_map_factory(self.data_type),
+            **datetime_map,
+        }
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/enum.py 0.34.0-1/src/datamodel_code_generator/model/enum.py
--- 0.26.4-3/src/datamodel_code_generator/model/enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,120 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from datamodel_code_generator.imports import IMPORT_ANY, IMPORT_ENUM, Import
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED, BaseClassDataType
+from datamodel_code_generator.types import DataType, Types
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BYTES: str = "bytes"
+_STR: str = "str"
+
+SUBCLASS_BASE_CLASSES: dict[Types, str] = {  # schema types whose enums may additionally subclass a builtin
+    Types.int32: _INT,
+    Types.int64: _INT,
+    Types.integer: _INT,
+    Types.float: _FLOAT,
+    Types.double: _FLOAT,
+    Types.number: _FLOAT,
+    Types.byte: _BYTES,
+    Types.string: _STR,
+}
+
+
+class Enum(DataModel):  # enum.Enum flavor of DataModel
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Enum.jinja2"
+    BASE_CLASS: ClassVar[str] = "enum.Enum"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_ENUM,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        type_: Types | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+        if not base_classes and type_:  # e.g. string-typed enums become "class Foo(str, Enum)"
+            base_class = SUBCLASS_BASE_CLASSES.get(type_)
+            if base_class:
+                self.base_classes: list[BaseClassDataType] = [
+                    BaseClassDataType(type=base_class),
+                    *self.base_classes,
+                ]
+
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
+        raise NotImplementedError  # enums never provide a primitive type mapping
+
+    def get_member(self, field: DataModelFieldBase) -> Member:
+        return Member(self, field)
+
+    def find_member(self, value: Any) -> Member | None:  # locate the member whose default equals the given value (quote-insensitive)
+        repr_value = repr(value)
+        # Remove surrounding quotes from the string representation
+        str_value = str(value).strip("'\"")
+
+        for field in self.fields:
+            # Remove surrounding quotes from field default value
+            field_default = str(field.default or "").strip("'\"")
+
+            # Compare values after removing quotes
+            if field_default == str_value:
+                return self.get_member(field)
+
+            # Keep original comparison for backwards compatibility
+            if field.default == repr_value:  # pragma: no cover
+                return self.get_member(field)
+
+        return None
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        return tuple(i for i in super().imports if i != IMPORT_ANY)  # filter out the typing.Any import
+
+
+class Member:  # renders a reference to a single enum member, e.g. "Color.RED"
+    def __init__(self, enum: Enum, field: DataModelFieldBase) -> None:
+        self.enum: Enum = enum
+        self.field: DataModelFieldBase = field
+        self.alias: Optional[str] = None  # noqa: UP045
+
+    def __repr__(self) -> str:  # an alias, when set, replaces the enum's own name
+        return f"{self.alias or self.enum.name}.{self.field.name}"
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/imports.py 0.34.0-1/src/datamodel_code_generator/model/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
+IMPORT_DATACLASS = Import.from_full_path("dataclasses.dataclass")  # Import singletons for model-flavor-specific dependencies
+IMPORT_FIELD = Import.from_full_path("dataclasses.field")
+IMPORT_CLASSVAR = Import.from_full_path("typing.ClassVar")
+IMPORT_TYPED_DICT = Import.from_full_path("typing.TypedDict")
+IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path("typing_extensions.TypedDict")  # backports for Python versions lacking the typing originals
+IMPORT_NOT_REQUIRED = Import.from_full_path("typing.NotRequired")
+IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path("typing_extensions.NotRequired")
+IMPORT_MSGSPEC_STRUCT = Import.from_full_path("msgspec.Struct")
+IMPORT_MSGSPEC_FIELD = Import.from_full_path("msgspec.field")
+IMPORT_MSGSPEC_META = Import.from_full_path("msgspec.Meta")
+IMPORT_MSGSPEC_CONVERT = Import.from_full_path("msgspec.convert")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/msgspec.py 0.34.0-1/src/datamodel_code_generator/model/msgspec.py
--- 0.26.4-3/src/datamodel_code_generator/model/msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,320 @@
+from __future__ import annotations
+
+from functools import wraps
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar
+
+from pydantic import Field
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import (
+    IMPORT_CLASSVAR,
+    IMPORT_MSGSPEC_CONVERT,
+    IMPORT_MSGSPEC_FIELD,
+    IMPORT_MSGSPEC_META,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    Constraints as _Constraints,
+)
+from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
+from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.types import type_map_factory
+from datamodel_code_generator.types import (
+    DataType,
+    StrictTypes,
+    Types,
+    chain_as_tuple,
+    get_optional_type,
+)
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Sequence
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+def _has_field_assignment(field: DataModelFieldBase) -> bool:
+    return not (field.required or (field.represented_default == "None" and field.strip_default_none))
+
+
+DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound=DataModelFieldBase)
+
+
+def import_extender(cls: type[DataModelFieldBaseT]) -> type[DataModelFieldBaseT]:
+    original_imports: property = cls.imports
+
+    @wraps(original_imports.fget)  # pyright: ignore[reportArgumentType]
+    def new_imports(self: DataModelFieldBaseT) -> tuple[Import, ...]:
+        extra_imports = []
+        field = self.field
+        # TODO: Improve field detection
+        if field and field.startswith("field("):
+            extra_imports.append(IMPORT_MSGSPEC_FIELD)
+        if self.field and "lambda: convert" in self.field:
+            extra_imports.append(IMPORT_MSGSPEC_CONVERT)
+        if self.annotated:
+            extra_imports.append(IMPORT_MSGSPEC_META)
+        if self.extras.get("is_classvar"):
+            extra_imports.append(IMPORT_CLASSVAR)
+        return chain_as_tuple(original_imports.fget(self), extra_imports)  # pyright: ignore[reportOptionalCall]
+
+    cls.imports = property(new_imports)  # pyright: ignore[reportAttributeAccessIssue]
+    return cls
+
+
+class RootModel(_RootModel):
+    pass
+
+
+class Struct(DataModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "msgspec.jinja2"
+    BASE_CLASS: ClassVar[str] = "msgspec.Struct"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=sorted(fields, key=_has_field_assignment),
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        self.extra_template_data.setdefault("base_class_kwargs", {})
+        if self.keyword_only:
+            self.add_base_class_kwarg("kw_only", "True")
+
+    def add_base_class_kwarg(self, name: str, value: str) -> None:
+        self.extra_template_data["base_class_kwargs"][name] = value
+
+
+class Constraints(_Constraints):
+    # To override existing pattern alias
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
+
+
+@import_extender
+class DataModelField(DataModelFieldBase):
+    _FIELD_KEYS: ClassVar[set[str]] = {
+        "default",
+        "default_factory",
+    }
+    _META_FIELD_KEYS: ClassVar[set[str]] = {
+        "title",
+        "description",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        # 'min_items', # not supported by msgspec
+        # 'max_items', # not supported by msgspec
+        "min_length",
+        "max_length",
+        "pattern",
+        "examples",
+        # 'unique_items', # not supported by msgspec
+    }
+    _PARSE_METHOD = "convert"
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le", "multiple_of"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+
+    def self_reference(self) -> bool:  # pragma: no cover
+        return isinstance(self.parent, Struct) and self.parent.reference.path in {
+            d.reference.path for d in self.data_type.all_data_types if d.reference
+        }
+
+    def process_const(self) -> None:
+        if "const" not in self.extras:
+            return
+        self.const = True
+        self.nullable = False
+        const = self.extras["const"]
+        if self.data_type.type == "str" and isinstance(const, str):  # pragma: no cover # Literal supports only str
+            self.data_type = self.data_type.__class__(literals=[const])
+
+    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
+        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
+            return value
+
+        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
+            return float(value)
+        return int(value)
+
+    @property
+    def field(self) -> str | None:
+        """for backwards compatibility"""
+        result = str(self)
+        if not result:
+            return None
+        return result
+
+    def __str__(self) -> str:
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
+        if self.alias:
+            data["name"] = self.alias
+
+        if self.default != UNDEFINED and self.default is not None:
+            data["default"] = self.default
+        elif not self.required:
+            data["default"] = None
+
+        if self.required:
+            data = {
+                k: v
+                for k, v in data.items()
+                if k
+                not in {
+                    "default",
+                    "default_factory",
+                }
+            }
+        elif self.default and "default_factory" not in data:
+            default_factory = self._get_default_as_struct_model()
+            if default_factory is not None:
+                data.pop("default")
+                data["default_factory"] = default_factory
+
+        if not data:
+            return ""
+
+        if len(data) == 1 and "default" in data:
+            return repr(data["default"])
+
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
+        return f"field({', '.join(kwargs)})"
+
+    @property
+    def annotated(self) -> str | None:
+        if not self.use_annotated:  # pragma: no cover
+            return None
+
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS}
+        if self.constraints is not None and not self.self_reference() and not self.data_type.strict:
+            data = {
+                **data,
+                **{
+                    k: self._get_strict_field_constraint_value(k, v)
+                    for k, v in self.constraints.dict().items()
+                    if k in self._META_FIELD_KEYS
+                },
+            }
+
+        meta_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
+        if not meta_arguments:
+            return None
+
+        meta = f"Meta({', '.join(meta_arguments)})"
+
+        if not self.required and not self.extras.get("is_classvar"):
+            type_hint = self.data_type.type_hint
+            annotated_type = f"Annotated[{type_hint}, {meta}]"
+            return get_optional_type(annotated_type, self.data_type.use_union_operator)
+
+        annotated_type = f"Annotated[{self.type_hint}, {meta}]"
+        if self.extras.get("is_classvar"):
+            annotated_type = f"ClassVar[{annotated_type}]"
+
+        return annotated_type
+
+    def _get_default_as_struct_model(self) -> str | None:
+        for data_type in self.data_type.data_types or (self.data_type,):
+            # TODO: Check nested data_types
+            if data_type.is_dict or self.data_type.is_union:
+                # TODO: Parse Union and dict model for default
+                continue  # pragma: no cover
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
+                if (  # pragma: no cover
+                    data_type_child.reference
+                    and (isinstance(data_type_child.reference.source, (Struct, RootModel)))
+                    and isinstance(self.default, list)
+                ):
+                    return (
+                        f"lambda: {self._PARSE_METHOD}({self.default!r}, "
+                        f"type=list[{data_type_child.alias or data_type_child.reference.source.class_name}])"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, Struct):
+                return (
+                    f"lambda: {self._PARSE_METHOD}({self.default!r}, "
+                    f"type={data_type.alias or data_type.reference.source.class_name})"
+                )
+        return None
+
+
+class DataTypeManager(_DataTypeManager):
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+        )
+
+        datetime_map = (
+            {
+                Types.time: self.data_type.from_import(IMPORT_TIME),
+                Types.date: self.data_type.from_import(IMPORT_DATE),
+                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+            }
+            if target_datetime_class is DatetimeClassType.Datetime
+            else {}
+        )
+
+        self.type_map: dict[Types, DataType] = {
+            **type_map_factory(self.data_type),
+            **datetime_map,
+        }
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/__init__.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional
+
+from pydantic import BaseModel as _BaseModel
+
+from .base_model import BaseModel, DataModelField
+from .custom_root_type import CustomRootType
+from .dataclass import DataClass
+from .types import DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
+    return "\n".join(f"{class_name}.update_forward_refs()" for class_name in class_names)
+
+
+class Config(_BaseModel):
+    extra: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    allow_population_by_field_name: Optional[bool] = None  # noqa: UP045
+    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
+    allow_mutation: Optional[bool] = None  # noqa: UP045
+    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
+    orm_mode: Optional[bool] = None  # noqa: UP045
+
+
+__all__ = [
+    "BaseModel",
+    "CustomRootType",
+    "DataClass",
+    "DataModelField",
+    "DataTypeManager",
+    "dump_resolve_reference_action",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/base_model.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/base_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/base_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,318 @@
+from __future__ import annotations
+
+from abc import ABC
+from functools import cached_property
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from pydantic import Field
+
+from datamodel_code_generator.model import (
+    ConstraintsBase,
+    DataModel,
+    DataModelFieldBase,
+)
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_ANYURL,
+    IMPORT_EXTRA,
+    IMPORT_FIELD,
+)
+from datamodel_code_generator.types import UnionIntFloat, chain_as_tuple
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.imports import Import
+    from datamodel_code_generator.reference import Reference
+
+
+class Constraints(ConstraintsBase):
+    gt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMinimum")  # noqa: UP045
+    ge: Optional[UnionIntFloat] = Field(None, alias="minimum")  # noqa: UP045
+    lt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMaximum")  # noqa: UP045
+    le: Optional[UnionIntFloat] = Field(None, alias="maximum")  # noqa: UP045
+    multiple_of: Optional[float] = Field(None, alias="multipleOf")  # noqa: UP045
+    min_items: Optional[int] = Field(None, alias="minItems")  # noqa: UP045
+    max_items: Optional[int] = Field(None, alias="maxItems")  # noqa: UP045
+    min_length: Optional[int] = Field(None, alias="minLength")  # noqa: UP045
+    max_length: Optional[int] = Field(None, alias="maxLength")  # noqa: UP045
+    regex: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
+
+
+class DataModelField(DataModelFieldBase):
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "const",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_items",
+        "max_items",
+        "min_length",
+        "max_length",
+        "regex",
+    }
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "parse_obj"
+
+    @property
+    def method(self) -> str | None:
+        return self.validator
+
+    @property
+    def validator(self) -> str | None:
+        return None
+        # TODO refactor this method for other validation logic
+
+    @property
+    def field(self) -> str | None:
+        """for backwards compatibility"""
+        result = str(self)
+        if (
+            self.use_default_kwarg
+            and not result.startswith("Field(...")
+            and not result.startswith("Field(default_factory=")
+        ):
+            # Use `default=` for fields that have a default value so that type
+            # checkers using @dataclass_transform can infer the field as
+            # optional in __init__.
+            result = result.replace("Field(", "Field(default=")
+        if not result:
+            return None
+        return result
+
+    def self_reference(self) -> bool:
+        return isinstance(self.parent, BaseModelBase) and self.parent.reference.path in {
+            d.reference.path for d in self.data_type.all_data_types if d.reference
+        }
+
+    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
+        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
+            return value
+
+        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
+            return float(value)
+        return int(value)
+
+    def _get_default_as_pydantic_model(self) -> str | None:
+        for data_type in self.data_type.data_types or (self.data_type,):
+            # TODO: Check nested data_types
+            if data_type.is_dict or self.data_type.is_union:
+                # TODO: Parse Union and dict model for default
+                continue
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
+                if (
+                    data_type_child.reference
+                    and isinstance(data_type_child.reference.source, BaseModelBase)
+                    and isinstance(self.default, list)
+                ):  # pragma: no cover
+                    return (
+                        f"lambda: [{data_type_child.alias or data_type_child.reference.source.class_name}."
+                        f"{self._PARSE_METHOD}(v) for v in {self.default!r}]"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, BaseModelBase):  # pragma: no cover
+                return (
+                    f"lambda: {data_type.alias or data_type.reference.source.class_name}."
+                    f"{self._PARSE_METHOD}({self.default!r})"
+                )
+        return None
+
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
+        if self.const:
+            data["const"] = True
+
+    def _process_annotated_field_arguments(self, field_arguments: list[str]) -> list[str]:  # noqa: PLR6301
+        return field_arguments
+
+    def __str__(self) -> str:  # noqa: PLR0912
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS}
+        if self.alias:
+            data["alias"] = self.alias
+        if self.constraints is not None and not self.self_reference() and not self.data_type.strict:
+            data = {
+                **data,
+                **(
+                    {}
+                    if any(d.import_ == IMPORT_ANYURL for d in self.data_type.all_data_types)
+                    else {
+                        k: self._get_strict_field_constraint_value(k, v)
+                        for k, v in self.constraints.dict(exclude_unset=True).items()
+                    }
+                ),
+            }
+
+        if self.use_field_description:
+            data.pop("description", None)  # Description is part of field docstring
+
+        self._process_data_in_str(data)
+
+        discriminator = data.pop("discriminator", None)
+        if discriminator:
+            if isinstance(discriminator, str):
+                data["discriminator"] = discriminator
+            elif isinstance(discriminator, dict):  # pragma: no cover
+                data["discriminator"] = discriminator["propertyName"]
+
+        if self.required:
+            default_factory = None
+        elif self.default and "default_factory" not in data:
+            default_factory = self._get_default_as_pydantic_model()
+        else:
+            default_factory = data.pop("default_factory", None)
+
+        field_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
+
+        if not field_arguments and not default_factory:
+            if self.nullable and self.required:
+                return "Field(...)"  # Field() is for mypy
+            return ""
+
+        if self.use_annotated:
+            field_arguments = self._process_annotated_field_arguments(field_arguments)
+        elif self.required:
+            field_arguments = ["...", *field_arguments]
+        elif default_factory:
+            field_arguments = [f"default_factory={default_factory}", *field_arguments]
+        else:
+            field_arguments = [f"{self.default!r}", *field_arguments]
+
+        return f"Field({', '.join(field_arguments)})"
+
+    @property
+    def annotated(self) -> str | None:
+        if not self.use_annotated or not str(self):
+            return None
+        return f"Annotated[{self.type_hint}, {self!s}]"
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        if self.field:
+            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
+        return super().imports
+
+
+class BaseModelBase(DataModel, ABC):
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        methods: list[str] = [field.method for field in fields if field.method]
+
+        super().__init__(
+            fields=fields,
+            reference=reference,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+    @cached_property
+    def template_file_path(self) -> Path:
+        # This property is for Backward compatibility
+        # Current version supports '{custom_template_dir}/BaseModel.jinja'
+        # But, Future version will support only '{custom_template_dir}/pydantic/BaseModel.jinja'
+        if self._custom_template_dir is not None:
+            custom_template_file_path = self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
+            if custom_template_file_path.exists():
+                return custom_template_file_path
+        return super().template_file_path
+
+
+class BaseModel(BaseModelBase):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        config_parameters: dict[str, Any] = {}
+
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        extra_fields = self.extra_template_data.get("extra_fields")
+
+        if allow_extra_fields or extra_fields or additional_properties is not None:
+            self._additional_imports.append(IMPORT_EXTRA)
+
+        if allow_extra_fields:
+            config_parameters["extra"] = "Extra.allow"
+        elif extra_fields:
+            config_parameters["extra"] = f"Extra.{extra_fields}"
+        elif additional_properties is True:
+            config_parameters["extra"] = "Extra.allow"
+        elif additional_properties is False:
+            config_parameters["extra"] = "Extra.forbid"
+
+        for config_attribute in "allow_population_by_field_name", "allow_mutation":
+            if config_attribute in self.extra_template_data:
+                config_parameters[config_attribute] = self.extra_template_data[config_attribute]
+        for data_type in self.all_data_types:
+            if data_type.is_custom_type:
+                config_parameters["arbitrary_types_allowed"] = True
+                break
+
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403
+
+        if config_parameters:
+            from datamodel_code_generator.model.pydantic import Config  # noqa: PLC0415
+
+            self.extra_template_data["config"] = Config.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/custom_root_type.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/custom_root_type.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/custom_root_type.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from typing import ClassVar
+
+from datamodel_code_generator.model.pydantic.base_model import BaseModel
+
+
+class CustomRootType(BaseModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel_root.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/dataclass.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/dataclass.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, ClassVar
+
+from datamodel_code_generator.model import DataModel
+from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS
+
+if TYPE_CHECKING:
+    from datamodel_code_generator.imports import Import
+
+
+class DataClass(DataModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/imports.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
+IMPORT_CONSTR = Import.from_full_path("pydantic.constr")
+IMPORT_CONINT = Import.from_full_path("pydantic.conint")
+IMPORT_CONFLOAT = Import.from_full_path("pydantic.confloat")
+IMPORT_CONDECIMAL = Import.from_full_path("pydantic.condecimal")
+IMPORT_CONBYTES = Import.from_full_path("pydantic.conbytes")
+IMPORT_POSITIVE_INT = Import.from_full_path("pydantic.PositiveInt")
+IMPORT_NEGATIVE_INT = Import.from_full_path("pydantic.NegativeInt")
+IMPORT_NON_POSITIVE_INT = Import.from_full_path("pydantic.NonPositiveInt")
+IMPORT_NON_NEGATIVE_INT = Import.from_full_path("pydantic.NonNegativeInt")
+IMPORT_POSITIVE_FLOAT = Import.from_full_path("pydantic.PositiveFloat")
+IMPORT_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NegativeFloat")
+IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NonNegativeFloat")
+IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path("pydantic.NonPositiveFloat")
+IMPORT_SECRET_STR = Import.from_full_path("pydantic.SecretStr")
+IMPORT_EMAIL_STR = Import.from_full_path("pydantic.EmailStr")
+IMPORT_UUID1 = Import.from_full_path("pydantic.UUID1")
+IMPORT_UUID2 = Import.from_full_path("pydantic.UUID2")
+IMPORT_UUID3 = Import.from_full_path("pydantic.UUID3")
+IMPORT_UUID4 = Import.from_full_path("pydantic.UUID4")
+IMPORT_UUID5 = Import.from_full_path("pydantic.UUID5")
+IMPORT_ANYURL = Import.from_full_path("pydantic.AnyUrl")
+IMPORT_IPV4ADDRESS = Import.from_full_path("ipaddress.IPv4Address")
+IMPORT_IPV6ADDRESS = Import.from_full_path("ipaddress.IPv6Address")
+IMPORT_IPV4NETWORKS = Import.from_full_path("ipaddress.IPv4Network")
+IMPORT_IPV6NETWORKS = Import.from_full_path("ipaddress.IPv6Network")
+IMPORT_EXTRA = Import.from_full_path("pydantic.Extra")
+IMPORT_FIELD = Import.from_full_path("pydantic.Field")
+IMPORT_STRICT_INT = Import.from_full_path("pydantic.StrictInt")
+IMPORT_STRICT_FLOAT = Import.from_full_path("pydantic.StrictFloat")
+IMPORT_STRICT_STR = Import.from_full_path("pydantic.StrictStr")
+IMPORT_STRICT_BOOL = Import.from_full_path("pydantic.StrictBool")
+IMPORT_STRICT_BYTES = Import.from_full_path("pydantic.StrictBytes")
+IMPORT_DATACLASS = Import.from_full_path("pydantic.dataclasses.dataclass")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/types.py 0.34.0-1/src/datamodel_code_generator/model/pydantic/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic/types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,325 @@
+from __future__ import annotations
+
+from decimal import Decimal
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_ANY,
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_DECIMAL,
+    IMPORT_PATH,
+    IMPORT_PENDULUM_DATE,
+    IMPORT_PENDULUM_DATETIME,
+    IMPORT_PENDULUM_DURATION,
+    IMPORT_PENDULUM_TIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    IMPORT_UUID,
+)
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_ANYURL,
+    IMPORT_CONBYTES,
+    IMPORT_CONDECIMAL,
+    IMPORT_CONFLOAT,
+    IMPORT_CONINT,
+    IMPORT_CONSTR,
+    IMPORT_EMAIL_STR,
+    IMPORT_IPV4ADDRESS,
+    IMPORT_IPV4NETWORKS,
+    IMPORT_IPV6ADDRESS,
+    IMPORT_IPV6NETWORKS,
+    IMPORT_NEGATIVE_FLOAT,
+    IMPORT_NEGATIVE_INT,
+    IMPORT_NON_NEGATIVE_FLOAT,
+    IMPORT_NON_NEGATIVE_INT,
+    IMPORT_NON_POSITIVE_FLOAT,
+    IMPORT_NON_POSITIVE_INT,
+    IMPORT_POSITIVE_FLOAT,
+    IMPORT_POSITIVE_INT,
+    IMPORT_SECRET_STR,
+    IMPORT_STRICT_BOOL,
+    IMPORT_STRICT_BYTES,
+    IMPORT_STRICT_FLOAT,
+    IMPORT_STRICT_INT,
+    IMPORT_STRICT_STR,
+    IMPORT_UUID1,
+    IMPORT_UUID2,
+    IMPORT_UUID3,
+    IMPORT_UUID4,
+    IMPORT_UUID5,
+)
+from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
+from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
def type_map_factory(
    data_type: type[DataType],
    strict_types: Sequence[StrictTypes],
    pattern_key: str,
    use_pendulum: bool,  # noqa: FBT001
) -> dict[Types, DataType]:
    """Build the default mapping from schema ``Types`` to pydantic v1 data types."""
    int_type = data_type(type="int")
    float_type = data_type(type="float")
    str_type = data_type(type="str")

    strict_str = StrictTypes.str in strict_types
    # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
    hostname_kwargs = {
        pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
        r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
    }
    if strict_str:
        hostname_kwargs["strict"] = True

    result = {
        Types.integer: int_type,
        Types.int32: int_type,
        Types.int64: int_type,
        Types.number: float_type,
        Types.float: float_type,
        Types.double: float_type,
        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
        Types.time: data_type.from_import(IMPORT_TIME),
        Types.string: str_type,
        Types.byte: str_type,  # base64 encoded string
        Types.binary: data_type(type="bytes"),
        Types.date: data_type.from_import(IMPORT_DATE),
        Types.date_time: data_type.from_import(IMPORT_DATETIME),
        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
        Types.path: data_type.from_import(IMPORT_PATH),
        Types.password: data_type.from_import(IMPORT_SECRET_STR),
        Types.email: data_type.from_import(IMPORT_EMAIL_STR),
        Types.uuid: data_type.from_import(IMPORT_UUID),
        Types.uuid1: data_type.from_import(IMPORT_UUID1),
        Types.uuid2: data_type.from_import(IMPORT_UUID2),
        Types.uuid3: data_type.from_import(IMPORT_UUID3),
        Types.uuid4: data_type.from_import(IMPORT_UUID4),
        Types.uuid5: data_type.from_import(IMPORT_UUID5),
        Types.uri: data_type.from_import(IMPORT_ANYURL),
        Types.hostname: data_type.from_import(IMPORT_CONSTR, strict=strict_str, kwargs=hostname_kwargs),
        Types.ipv4: data_type.from_import(IMPORT_IPV4ADDRESS),
        Types.ipv6: data_type.from_import(IMPORT_IPV6ADDRESS),
        Types.ipv4_network: data_type.from_import(IMPORT_IPV4NETWORKS),
        Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
        Types.boolean: data_type(type="bool"),
        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
        Types.null: data_type(type="None"),
        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
        Types.any: data_type.from_import(IMPORT_ANY),
    }
    if use_pendulum:
        # Override the stdlib date/time entries with pendulum's replacements.
        result.update({
            Types.date: data_type.from_import(IMPORT_PENDULUM_DATE),
            Types.date_time: data_type.from_import(IMPORT_PENDULUM_DATETIME),
            Types.time: data_type.from_import(IMPORT_PENDULUM_TIME),
            Types.timedelta: data_type.from_import(IMPORT_PENDULUM_DURATION),
        })
    return result
+
+
def strict_type_map_factory(data_type: type[DataType]) -> dict[StrictTypes, DataType]:
    """Map each ``StrictTypes`` member to its pydantic ``Strict*`` import."""
    strict_imports = (
        (StrictTypes.int, IMPORT_STRICT_INT),
        (StrictTypes.float, IMPORT_STRICT_FLOAT),
        (StrictTypes.bytes, IMPORT_STRICT_BYTES),
        (StrictTypes.bool, IMPORT_STRICT_BOOL),
        (StrictTypes.str, IMPORT_STRICT_STR),
    )
    return {strict_type: data_type.from_import(import_, strict=True) for strict_type, import_ in strict_imports}
+
+
# JSON Schema keywords that constrain numeric (int/float/Decimal) types.
number_kwargs: set[str] = {
    "exclusiveMinimum",
    "minimum",
    "exclusiveMaximum",
    "maximum",
    "multipleOf",
}

# JSON Schema keywords that constrain string types.
string_kwargs: set[str] = {"minItems", "maxItems", "minLength", "maxLength", "pattern"}

# JSON Schema keywords that constrain bytes types.
bytes_kwargs: set[str] = {"minLength", "maxLength"}

# Translation table escaping quotes and control characters so a regex can be
# embedded safely inside a single-quoted Python literal.
escape_characters = str.maketrans({
    "'": r"\'",
    "\b": r"\b",
    "\f": r"\f",
    "\n": r"\n",
    "\r": r"\r",
    "\t": r"\t",
})
+
+
class DataTypeManager(_DataTypeManager):
    """Resolve schema types to pydantic v1 data types.

    Constrained schemas (``minimum``, ``pattern``, …) map to pydantic's
    ``con*`` types; ``--strict-types`` selections map to ``Strict*`` types.
    """

    # pydantic v1 spells the string-pattern keyword "regex" (v2 renames it "pattern").
    PATTERN_KEY: ClassVar[str] = "regex"

    def __init__(  # noqa: PLR0913, PLR0917
        self,
        python_version: PythonVersion = PythonVersionMin,
        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
        strict_types: Sequence[StrictTypes] | None = None,
        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
        use_union_operator: bool = False,  # noqa: FBT001, FBT002
        use_pendulum: bool = False,  # noqa: FBT001, FBT002
        target_datetime_class: DatetimeClassType | None = None,
        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        super().__init__(
            python_version,
            use_standard_collections,
            use_generic_container_types,
            strict_types,
            use_non_positive_negative_number_constrained_types,
            use_union_operator,
            use_pendulum,
            target_datetime_class,
            treat_dot_as_module,
        )

        # Default mapping used when a type carries no extra constraints.
        self.type_map: dict[Types, DataType] = self.type_map_factory(
            self.data_type,
            strict_types=self.strict_types,
            pattern_key=self.PATTERN_KEY,
            target_datetime_class=self.target_datetime_class,
        )
        # Mapping used when a strict type is requested without constraints.
        self.strict_type_map: dict[StrictTypes, DataType] = strict_type_map_factory(
            self.data_type,
        )

        # JSON Schema keyword -> pydantic `con*` keyword argument.
        self.kwargs_schema_to_model: dict[str, str] = {
            "exclusiveMinimum": "gt",
            "minimum": "ge",
            "exclusiveMaximum": "lt",
            "maximum": "le",
            "multipleOf": "multiple_of",
            "minItems": "min_items",
            "maxItems": "max_items",
            "minLength": "min_length",
            "maxLength": "max_length",
            "pattern": self.PATTERN_KEY,
        }

    def type_map_factory(
        self,
        data_type: type[DataType],
        strict_types: Sequence[StrictTypes],
        pattern_key: str,
        target_datetime_class: DatetimeClassType | None,  # noqa: ARG002
    ) -> dict[Types, DataType]:
        """Delegate to the module-level factory (``target_datetime_class`` is unused in v1)."""
        return type_map_factory(
            data_type,
            strict_types,
            pattern_key,
            self.use_pendulum,
        )

    def transform_kwargs(self, kwargs: dict[str, Any], filter_: set[str]) -> dict[str, Any]:
        """Rename schema keywords to model keywords, keeping only non-None keys in ``filter_``."""
        return {self.kwargs_schema_to_model.get(k, k): v for (k, v) in kwargs.items() if v is not None and k in filter_}

    def get_data_int_type(  # noqa: PLR0911
        self,
        types: Types,
        **kwargs: Any,
    ) -> DataType:
        """Return an int type: shorthand Positive/Negative types, conint, StrictInt, or plain int."""
        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, number_kwargs)
        strict = StrictTypes.int in self.strict_types
        if data_type_kwargs:
            if not strict:
                # Single-bound constraints map to pydantic's shorthand types.
                if data_type_kwargs == {"gt": 0}:
                    return self.data_type.from_import(IMPORT_POSITIVE_INT)
                if data_type_kwargs == {"lt": 0}:
                    return self.data_type.from_import(IMPORT_NEGATIVE_INT)
                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_INT)
                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
                    return self.data_type.from_import(IMPORT_NON_POSITIVE_INT)
            kwargs = {k: int(v) for k, v in data_type_kwargs.items()}
            if strict:
                kwargs["strict"] = True
            return self.data_type.from_import(IMPORT_CONINT, kwargs=kwargs)
        if strict:
            return self.strict_type_map[StrictTypes.int]
        return self.type_map[types]

    def get_data_float_type(  # noqa: PLR0911
        self,
        types: Types,
        **kwargs: Any,
    ) -> DataType:
        """Return a float type: shorthand Positive/Negative types, confloat, StrictFloat, or plain float."""
        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
        strict = StrictTypes.float in self.strict_types
        if data_type_kwargs:
            if not strict:
                # Single-bound constraints map to pydantic's shorthand types.
                if data_type_kwargs == {"gt": 0}:
                    return self.data_type.from_import(IMPORT_POSITIVE_FLOAT)
                if data_type_kwargs == {"lt": 0}:
                    return self.data_type.from_import(IMPORT_NEGATIVE_FLOAT)
                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_FLOAT)
                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
                    return self.data_type.from_import(IMPORT_NON_POSITIVE_FLOAT)
            kwargs = {k: float(v) for k, v in data_type_kwargs.items()}
            if strict:
                kwargs["strict"] = True
            return self.data_type.from_import(IMPORT_CONFLOAT, kwargs=kwargs)
        if strict:
            return self.strict_type_map[StrictTypes.float]
        return self.type_map[types]

    def get_data_decimal_type(self, types: Types, **kwargs: Any) -> DataType:
        """Return condecimal when constrained, otherwise the plain Decimal type."""
        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
        if data_type_kwargs:
            return self.data_type.from_import(
                IMPORT_CONDECIMAL,
                # UnionIntFloat is stringified first so Decimal() accepts it.
                kwargs={k: Decimal(str(v) if isinstance(v, UnionIntFloat) else v) for k, v in data_type_kwargs.items()},
            )
        return self.type_map[types]

    def get_data_str_type(self, types: Types, **kwargs: Any) -> DataType:
        """Return constr when constrained, StrictStr when strict, or plain str."""
        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, string_kwargs)
        strict = StrictTypes.str in self.strict_types
        if data_type_kwargs:
            if strict:
                data_type_kwargs["strict"] = True
            if self.PATTERN_KEY in data_type_kwargs:
                # Escape the pattern so it survives embedding in a quoted raw literal.
                escaped_regex = data_type_kwargs[self.PATTERN_KEY].translate(escape_characters)
                # TODO: remove unneeded escaped characters
                data_type_kwargs[self.PATTERN_KEY] = f"r'{escaped_regex}'"
            return self.data_type.from_import(IMPORT_CONSTR, kwargs=data_type_kwargs)
        if strict:
            return self.strict_type_map[StrictTypes.str]
        return self.type_map[types]

    def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
        """Return conbytes when constrained (non-strict), StrictBytes when strict, or plain bytes."""
        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, bytes_kwargs)
        strict = StrictTypes.bytes in self.strict_types
        if data_type_kwargs and not strict:
            return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
        # conbytes doesn't accept strict argument
        # https://github.com/samuelcolvin/pydantic/issues/2489
        if strict:
            return self.strict_type_map[StrictTypes.bytes]
        return self.type_map[types]

    def get_data_type(  # noqa: PLR0911
        self,
        types: Types,
        **kwargs: Any,
    ) -> DataType:
        """Dispatch to the per-kind resolver for ``types``, falling back to the static map."""
        if types == Types.string:
            return self.get_data_str_type(types, **kwargs)
        if types in {Types.int32, Types.int64, Types.integer}:
            return self.get_data_int_type(types, **kwargs)
        if types in {Types.float, Types.double, Types.number, Types.time}:
            return self.get_data_float_type(types, **kwargs)
        if types == Types.decimal:
            return self.get_data_decimal_type(types, **kwargs)
        if types == Types.binary:
            return self.get_data_bytes_type(types, **kwargs)
        if types == Types.boolean and StrictTypes.bool in self.strict_types:
            return self.strict_type_map[StrictTypes.bool]

        return self.type_map[types]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/__init__.py 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional
+
+from pydantic import BaseModel as _BaseModel
+
+from .base_model import BaseModel, DataModelField, UnionMode
+from .root_model import RootModel
+from .types import DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
    """Render one ``<name>.model_rebuild()`` call per class, newline-separated."""
    rebuild_calls = [f"{name}.model_rebuild()" for name in class_names]
    return "\n".join(rebuild_calls)
+
+
class ConfigDict(_BaseModel):
    """Collected pydantic v2 model-config keys for template rendering.

    Every key defaults to ``None`` (meaning "not set"); only keys assigned by
    the model builder carry a value.
    """

    extra: Optional[str] = None  # noqa: UP045
    title: Optional[str] = None  # noqa: UP045
    populate_by_name: Optional[bool] = None  # noqa: UP045
    allow_extra_fields: Optional[bool] = None  # noqa: UP045
    extra_fields: Optional[str] = None  # noqa: UP045
    from_attributes: Optional[bool] = None  # noqa: UP045
    frozen: Optional[bool] = None  # noqa: UP045
    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
    protected_namespaces: Optional[tuple[str, ...]] = None  # noqa: UP045
    regex_engine: Optional[str] = None  # noqa: UP045
    use_enum_values: Optional[bool] = None  # noqa: UP045
    coerce_numbers_to_str: Optional[bool] = None  # noqa: UP045
+
+
# Public surface of the pydantic_v2 model package.
__all__ = [
    "BaseModel",
    "DataModelField",
    "DataTypeManager",
    "RootModel",
    "UnionMode",
    "dump_resolve_reference_action",
]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/base_model.py 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/base_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/base_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,251 @@
+from __future__ import annotations
+
+import re
+from enum import Enum
+from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, Optional
+
+from pydantic import Field
+from typing_extensions import Literal
+
+from datamodel_code_generator.model.base import UNDEFINED, DataModelFieldBase
+from datamodel_code_generator.model.pydantic.base_model import (
+    BaseModelBase,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    Constraints as _Constraints,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    DataModelField as DataModelFieldV1,
+)
+from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
+from datamodel_code_generator.util import field_validator, model_validator
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
class UnionMode(Enum):
    """Values accepted by pydantic v2's ``Field(union_mode=...)``."""

    smart = "smart"
    left_to_right = "left_to_right"
+
+
class Constraints(_Constraints):
    """v2 field constraints; v1 array-bound keys are normalized to length keys."""

    # To override existing pattern alias
    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045

    @model_validator(mode="before")
    def validate_min_max_items(cls, values: Any) -> dict[str, Any]:  # noqa: N805
        """Rename v1 ``minItems``/``maxItems`` to v2 ``minLength``/``maxLength``."""
        if not isinstance(values, dict):  # pragma: no cover
            return values
        min_items = values.pop("minItems", None)
        if min_items is not None:
            values["minLength"] = min_items
        max_items = values.pop("maxItems", None)
        if max_items is not None:
            values["maxLength"] = max_items
        return values
+
+
class DataModelField(DataModelFieldV1):
    """Field renderer for pydantic v2 models.

    Extends the v1 field with v2 naming (``model_validate``, ``pattern``,
    ``examples``) and moves unknown extras into ``json_schema_extra``.
    """

    # NOTE(review): consumed by the v1 base class; appears to list keys excluded
    # from Field() when rendering annotated types - confirm in DataModelFieldV1.
    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
        "alias",
        "default",
        "gt",
        "ge",
        "lt",
        "le",
        "multiple_of",
        "min_length",
        "max_length",
        "pattern",
    }
    # Keyword arguments accepted by pydantic v2 ``Field``; anything outside this
    # set is folded into ``json_schema_extra`` (see _process_data_in_str).
    _DEFAULT_FIELD_KEYS: ClassVar[set[str]] = {
        "default",
        "default_factory",
        "alias",
        "alias_priority",
        "validation_alias",
        "serialization_alias",
        "title",
        "description",
        "examples",
        "exclude",
        "discriminator",
        "json_schema_extra",
        "frozen",
        "validate_default",
        "repr",
        "init_var",
        "kw_only",
        "pattern",
        "strict",
        "gt",
        "ge",
        "lt",
        "le",
        "multiple_of",
        "allow_inf_nan",
        "max_digits",
        "decimal_places",
        "min_length",
        "max_length",
        "union_mode",
    }
    constraints: Optional[Constraints] = None  # pyright: ignore[reportIncompatibleVariableOverride]  # noqa: UP045
    # pydantic v2 renamed parse_obj() to model_validate().
    _PARSE_METHOD: ClassVar[str] = "model_validate"
    can_have_extra_keys: ClassVar[bool] = False

    @field_validator("extras")
    def validate_extras(cls, values: Any) -> dict[str, Any]:  # noqa: N805
        """Normalize the v1 singular ``example`` extra into v2 ``examples``."""
        if not isinstance(values, dict):  # pragma: no cover
            return values
        if "examples" in values:
            return values

        if "example" in values:
            values["examples"] = [values.pop("example")]
        return values

    def process_const(self) -> None:
        """Convert a ``const`` extra into a Literal data type with a default."""
        if "const" not in self.extras:
            return
        self.const = True
        self.nullable = False
        const = self.extras["const"]
        self.data_type = self.data_type.__class__(literals=[const])
        if not self.default:
            self.default = const

    def _process_data_in_str(self, data: dict[str, Any]) -> None:
        """Adjust Field() keyword data in place for pydantic v2 rendering."""
        if self.const:
            # const is removed in pydantic 2.0
            data.pop("const")

        # unique_items is not supported in pydantic 2.0
        data.pop("unique_items", None)

        # union_mode only applies to union-typed fields; drop it otherwise.
        if "union_mode" in data:
            if self.data_type.is_union:
                data["union_mode"] = data.pop("union_mode").value
            else:
                data.pop("union_mode")

        # **extra is not supported in pydantic 2.0
        json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
        if json_schema_extra:
            data["json_schema_extra"] = json_schema_extra
            for key in json_schema_extra:
                data.pop(key)

    def _process_annotated_field_arguments(  # noqa: PLR6301
        self,
        field_arguments: list[str],
    ) -> list[str]:
        """Return field arguments unchanged (no v2-specific additions)."""
        return field_arguments
+
+
class ConfigAttribute(NamedTuple):
    """Maps a template-data key to a ConfigDict key, optionally inverting the value."""

    from_: str
    to: str
    invert: bool
+
+
class BaseModel(BaseModelBase):
    """Data model rendering a pydantic v2 ``BaseModel`` subclass.

    During construction, model-level configuration is collected from
    ``extra_template_data`` into a ``ConfigDict`` so the template can emit
    ``model_config = ConfigDict(...)``.
    """

    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
    # v1 and v2 spellings of config flags mapped to their v2 ConfigDict key;
    # `invert` handles allow_mutation -> frozen.
    CONFIG_ATTRIBUTES: ClassVar[list[ConfigAttribute]] = [
        ConfigAttribute("allow_population_by_field_name", "populate_by_name", False),  # noqa: FBT003
        ConfigAttribute("populate_by_name", "populate_by_name", False),  # noqa: FBT003
        ConfigAttribute("allow_mutation", "frozen", True),  # noqa: FBT003
        ConfigAttribute("frozen", "frozen", False),  # noqa: FBT003
    ]

    def __init__(  # noqa: PLR0913
        self,
        *,
        reference: Reference,
        fields: list[DataModelFieldBase],
        decorators: list[str] | None = None,
        base_classes: list[Reference] | None = None,
        custom_base_class: str | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, Any] | None = None,
        path: Path | None = None,
        description: str | None = None,
        default: Any = UNDEFINED,
        nullable: bool = False,
        keyword_only: bool = False,
        treat_dot_as_module: bool = False,
    ) -> None:
        super().__init__(
            reference=reference,
            fields=fields,
            decorators=decorators,
            base_classes=base_classes,
            custom_base_class=custom_base_class,
            custom_template_dir=custom_template_dir,
            extra_template_data=extra_template_data,
            path=path,
            description=description,
            default=default,
            nullable=nullable,
            keyword_only=keyword_only,
            treat_dot_as_module=treat_dot_as_module,
        )
        config_parameters: dict[str, Any] = {}

        # "extra" behaviour (allow/forbid/ignore) derived from schema/config flags.
        extra = self._get_config_extra()
        if extra:
            config_parameters["extra"] = extra

        for from_, to, invert in self.CONFIG_ATTRIBUTES:
            if from_ in self.extra_template_data:
                config_parameters[to] = (
                    not self.extra_template_data[from_] if invert else self.extra_template_data[from_]
                )
        # Any custom (non-pydantic) field type requires arbitrary_types_allowed.
        for data_type in self.all_data_types:
            if data_type.is_custom_type:  # pragma: no cover
                config_parameters["arbitrary_types_allowed"] = True
                break

        for field in self.fields:
            # Check if a regex pattern uses lookarounds.
            # Depending on the generation configuration, the pattern may end up in two different places.
            pattern = (isinstance(field.constraints, Constraints) and field.constraints.pattern) or (
                field.data_type.kwargs or {}
            ).get("pattern")
            if pattern and re.search(r"\(\?<?[=!]", pattern):
                # Patterns with lookarounds are rendered with the python-re engine.
                config_parameters["regex_engine"] = '"python-re"'
                break

        # Explicit per-model config entries override everything collected above.
        if isinstance(self.extra_template_data.get("config"), dict):
            for key, value in self.extra_template_data["config"].items():
                config_parameters[key] = value  # noqa: PERF403

        if config_parameters:
            from datamodel_code_generator.model.pydantic_v2 import ConfigDict  # noqa: PLC0415

            self.extra_template_data["config"] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
            self._additional_imports.append(IMPORT_CONFIG_DICT)

    def _get_config_extra(self) -> Literal["'allow'", "'forbid'", "'ignore'"] | None:
        """Resolve the ConfigDict ``extra`` value (as a quoted literal), or None.

        Explicit allow/forbid/ignore options take precedence over the schema's
        ``additionalProperties``.
        """
        additional_properties = self.extra_template_data.get("additionalProperties")
        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
        extra_fields = self.extra_template_data.get("extra_fields")

        config_extra = None
        if allow_extra_fields or extra_fields == "allow":
            config_extra = "'allow'"
        elif extra_fields == "forbid":
            config_extra = "'forbid'"
        elif extra_fields == "ignore":
            config_extra = "'ignore'"
        elif additional_properties is True:
            config_extra = "'allow'"
        elif additional_properties is False:
            config_extra = "'forbid'"
        return config_extra
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/imports.py 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
# Imports specific to generated pydantic v2 models.
IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/root_model.py 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/root_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/root_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/root_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from typing import Any, ClassVar, Literal
+
+from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
+
+
class RootModel(BaseModel):
    """Data model rendering a pydantic v2 ``RootModel`` subclass."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/RootModel.jinja2"
    BASE_CLASS: ClassVar[str] = "pydantic.RootModel"

    def __init__(
        self,
        **kwargs: Any,
    ) -> None:
        # Remove custom_base_class for Pydantic V2 models; behaviour is different from Pydantic V1 as it will not
        # be treated as a root model. custom_base_class cannot both implement BaseModel and RootModel!
        kwargs.pop("custom_base_class", None)
        super().__init__(**kwargs)

    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:  # noqa: PLR6301
        # PydanticV2 RootModels cannot have extra fields
        return None
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/types.py 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/pydantic_v2/types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, ClassVar
+
+from datamodel_code_generator.format import DatetimeClassType
+from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
+from datamodel_code_generator.model.pydantic_v2.imports import (
+    IMPORT_AWARE_DATETIME,
+    IMPORT_NAIVE_DATETIME,
+)
+from datamodel_code_generator.types import DataType, StrictTypes, Types
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
class DataTypeManager(_DataTypeManager):
    """Type manager for pydantic v2; reuses the v1 mapping with v2 spellings."""

    # pydantic v2 renamed constr()'s "regex" keyword to "pattern".
    PATTERN_KEY: ClassVar[str] = "pattern"

    def type_map_factory(
        self,
        data_type: type[DataType],
        strict_types: Sequence[StrictTypes],
        pattern_key: str,
        target_datetime_class: DatetimeClassType | None = None,
    ) -> dict[Types, DataType]:
        """Build the v1 map, then override hostname and (optionally) datetime entries.

        The hostname pattern here is anchored with ``$`` rather than the v1
        map's ``\\Z``.
        """
        result = {
            **super().type_map_factory(
                data_type,
                strict_types,
                pattern_key,
                target_datetime_class or DatetimeClassType.Datetime,
            ),
            Types.hostname: self.data_type.from_import(
                IMPORT_CONSTR,
                strict=StrictTypes.str in strict_types,
                # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
                kwargs={
                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
                    r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
                    **({"strict": True} if StrictTypes.str in strict_types else {}),
                },
            ),
        }
        # Swap in pydantic v2's timezone-aware/naive datetime types when requested.
        if target_datetime_class == DatetimeClassType.Awaredatetime:
            result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
        elif target_datetime_class == DatetimeClassType.Naivedatetime:
            result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
        return result
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/rootmodel.py 0.34.0-1/src/datamodel_code_generator/model/rootmodel.py
--- 0.26.4-3/src/datamodel_code_generator/model/rootmodel.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/rootmodel.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+from __future__ import annotations
+
+from typing import ClassVar
+
+from datamodel_code_generator.model import DataModel
+
+
class RootModel(DataModel):
    """Minimal data model rendered with the generic ``root.jinja2`` template."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "root.jinja2"
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/scalar.py 0.34.0-1/src/datamodel_code_generator/model/scalar.py
--- 0.26.4-3/src/datamodel_code_generator/model/scalar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/scalar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
# Python builtin type names emitted for GraphQL scalars.
_INT: str = "int"
_FLOAT: str = "float"
_BOOLEAN: str = "bool"
_STR: str = "str"

# default graphql scalar types
# Fallback Python type for scalars not listed below.
DEFAULT_GRAPHQL_SCALAR_TYPE = _STR

# Built-in GraphQL scalar name -> Python type name.
DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
    "Boolean": _BOOLEAN,
    "String": _STR,
    "ID": _STR,
    "Int": _INT,
    "Float": _FLOAT,
}
+
+
class DataTypeScalar(DataModel):
    """GraphQL scalar rendered as a ``TypeAlias`` to a Python type."""

    TEMPLATE_FILE_PATH: ClassVar[str] = "Scalar.jinja2"
    BASE_CLASS: ClassVar[str] = ""
    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)

    def __init__(  # noqa: PLR0913
        self,
        *,
        reference: Reference,
        fields: list[DataModelFieldBase],
        decorators: list[str] | None = None,
        base_classes: list[Reference] | None = None,
        custom_base_class: str | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
        methods: list[str] | None = None,
        path: Path | None = None,
        description: str | None = None,
        default: Any = UNDEFINED,
        nullable: bool = False,
        keyword_only: bool = False,
        treat_dot_as_module: bool = False,
    ) -> None:
        extra_template_data = extra_template_data or defaultdict(dict)

        scalar_name = reference.name
        if scalar_name not in extra_template_data:
            extra_template_data[scalar_name] = defaultdict(dict)

        # py_type
        # Resolve the Python type: a caller-provided py_type wins, then the
        # built-in scalar table, then the string fallback.
        py_type = extra_template_data[scalar_name].get(
            "py_type",
            DEFAULT_GRAPHQL_SCALAR_TYPES.get(reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE),
        )
        extra_template_data[scalar_name]["py_type"] = py_type

        super().__init__(
            reference=reference,
            fields=fields,
            decorators=decorators,
            base_classes=base_classes,
            custom_base_class=custom_base_class,
            custom_template_dir=custom_template_dir,
            extra_template_data=extra_template_data,
            methods=methods,
            path=path,
            description=description,
            default=default,
            nullable=nullable,
            keyword_only=keyword_only,
            treat_dot_as_module=treat_dot_as_module,
        )
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/Enum.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/Enum.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/Enum.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/Enum.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- for field in fields %}
+    {{ field.name }} = {{ field.default }}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/Scalar.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/Scalar.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/Scalar.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/Scalar.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{{ class_name }}: TypeAlias = {{ py_type }}
+{%- if description %}
+"""
+{{ description }}
+"""
+{%- endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDict.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/TypedDict.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDict.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/TypedDict.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,5 @@
+{%- if is_functional_syntax %}
+{% include 'TypedDictFunction.jinja2' %}
+{%- else %}
+{% include 'TypedDictClass.jinja2' %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictClass.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/TypedDictClass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictClass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/TypedDictClass.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+class {{ class_name }}({{ base_class }}):
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+{%- if description %}
+"""
+{{ description | indent(4) }}
+"""
+{%- endif %}
+{{ class_name }} = TypedDict('{{ class_name }}', {
+{%- for field in all_fields %}
+    '{{ field.key }}': {{ field.type_hint }},
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
+})
+
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/Union.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/Union.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/Union.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/Union.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{%- if description %}
+# {{ description | replace('\n', '\n# ') }}
+{%- endif %}
+{%- if fields|length > 1 %}
+{{ class_name }}: TypeAlias = Union[
+{%- for field in fields %}
+    '{{ field.name }}',
+{%- endfor %}
+]{% else %}
+{{ class_name }}: TypeAlias = {{ fields[0].name }}{% endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/dataclass.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/dataclass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/dataclass.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+@dataclass
+{%- if keyword_only or frozen -%}
+(
+{%- if keyword_only -%}kw_only=True{%- endif -%}
+{%- if keyword_only and frozen -%}, {% endif -%}
+{%- if frozen -%}frozen=True{%- endif -%}
+)
+{%- endif %}
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/msgspec.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/msgspec.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/msgspec.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/msgspec.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
+, {{ key }}={{ value }}
+{%- endfor -%}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated and not field.field %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- elif field.annotated and field.field %}
+    {{ field.name }}: {{ field.annotated }} = {{ field.field }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+
+
+
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'Config.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- for field in fields -%}
+    {%- if not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- for method in methods -%}
+    {{ method }}
+{%- endfor -%}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'Config.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- else %}
+    {%- set field = fields[0] %}
+    {%- if not field.annotated and field.field %}
+    __root__: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    __root__: {{ field.annotated }}
+    {%- else %}
+    __root__: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/Config.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/Config.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/Config.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/Config.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+class Config:
+{%- for field_name, value in config.dict(exclude_unset=True).items() %}
+    {{ field_name }} = {{ value }}
+{%- endfor %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+@dataclass
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.default %}
+    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+{% if base_class != "BaseModel" and "," not in base_class and not fields and not config -%}
+
+{# if this is just going to be `class Foo(Bar): pass`, then might as well just make Foo
+an alias for Bar: every pydantic model class consumes considerable memory. #}
+{{ class_name }} = {{ base_class }}
+
+{% else -%}
+
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'ConfigDict.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- for field in fields -%}
+    {%- if not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none)) or field.data_type.is_optional
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- for method in methods -%}
+    {{ method }}
+{%- endfor -%}
+{%- endfor -%}
+
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,5 @@
+model_config = ConfigDict(
+{%- for field_name, value in config.dict(exclude_unset=True).items() %}
+    {{ field_name }}={{ value }},
+{%- endfor %}
+)
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,45 @@
+{%- macro get_type_hint(_fields) -%}
+{%- if _fields -%}
+{#There will only ever be a single field for RootModel#}
+{{- _fields[0].type_hint}}
+{%- endif -%}
+{%- endmacro -%}
+
+
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+
+class {{ class_name }}({{ base_class }}{%- if fields -%}[{{get_type_hint(fields)}}]{%- endif -%}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'ConfigDict.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- else %}
+    {%- set field = fields[0] %}
+    {%- if not field.annotated and field.field %}
+    root: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    root: {{ field.annotated }}
+    {%- else %}
+    root: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- endif %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/root.jinja2 0.34.0-1/src/datamodel_code_generator/model/template/root.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/root.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/template/root.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{%- set field = fields[0] %}
+{%- if field.annotated %}
+{{ class_name }} = {{ field.annotated }}
+{%- else %}
+{{ class_name }} = {{ field.type_hint }}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/typed_dict.py 0.34.0-1/src/datamodel_code_generator/model/typed_dict.py
--- 0.26.4-3/src/datamodel_code_generator/model/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/typed_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,157 @@
+from __future__ import annotations
+
+import keyword
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import (
+    IMPORT_NOT_REQUIRED,
+    IMPORT_NOT_REQUIRED_BACKPORT,
+    IMPORT_TYPED_DICT,
+)
+from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Iterator
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+from datamodel_code_generator.imports import Import  # noqa: TC001
+
+escape_characters = str.maketrans({
+    "\\": r"\\",
+    "'": r"\'",
+    "\b": r"\b",
+    "\f": r"\f",
+    "\n": r"\n",
+    "\r": r"\r",
+    "\t": r"\t",
+})
+
+
+def _is_valid_field_name(field: DataModelFieldBase) -> bool:
+    name = field.original_name or field.name
+    if name is None:  # pragma: no cover
+        return False
+    return name.isidentifier() and not keyword.iskeyword(name)
+
+
+class TypedDict(DataModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypedDict.jinja2"
+    BASE_CLASS: ClassVar[str] = "typing.TypedDict"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+    @property
+    def is_functional_syntax(self) -> bool:
+        return any(not _is_valid_field_name(f) for f in self.fields)
+
+    @property
+    def all_fields(self) -> Iterator[DataModelFieldBase]:
+        for base_class in self.base_classes:
+            if base_class.reference is None:  # pragma: no cover
+                continue
+            data_model = base_class.reference.source
+            if not isinstance(data_model, DataModel):  # pragma: no cover
+                continue
+
+            if isinstance(data_model, TypedDict):  # pragma: no cover
+                yield from data_model.all_fields
+
+        yield from self.fields
+
+    def render(self, *, class_name: str | None = None) -> str:
+        return self._render(
+            class_name=class_name or self.class_name,
+            fields=self.fields,
+            decorators=self.decorators,
+            base_class=self.base_class,
+            methods=self.methods,
+            description=self.description,
+            is_functional_syntax=self.is_functional_syntax,
+            all_fields=self.all_fields,
+            **self.extra_template_data,
+        )
+
+
+class DataModelField(DataModelFieldBase):
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
+
+    def process_const(self) -> None:
+        if "const" not in self.extras:
+            return
+        self.const = True
+        self.nullable = False
+        const = self.extras["const"]
+        self.data_type = self.data_type.__class__(literals=[const])
+        if not self.default:
+            self.default = const
+
+    @property
+    def key(self) -> str:
+        return (self.original_name or self.name or "").translate(  # pragma: no cover
+            escape_characters
+        )
+
+    @property
+    def type_hint(self) -> str:
+        type_hint = super().type_hint
+        if self._not_required:
+            return f"{NOT_REQUIRED_PREFIX}{type_hint}]"
+        return type_hint
+
+    @property
+    def _not_required(self) -> bool:
+        return not self.required and isinstance(self.parent, TypedDict)
+
+    @property
+    def fall_back_to_nullable(self) -> bool:
+        return not self._not_required
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        return (
+            *super().imports,
+            *(self.DEFAULT_IMPORTS if self._not_required else ()),
+        )
+
+
+class DataModelFieldBackport(DataModelField):
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/types.py 0.34.0-1/src/datamodel_code_generator/model/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_ANY,
+    IMPORT_DECIMAL,
+    IMPORT_TIMEDELTA,
+)
+from datamodel_code_generator.types import DataType, StrictTypes, Types
+from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+def type_map_factory(data_type: type[DataType]) -> dict[Types, DataType]:
+    data_type_int = data_type(type="int")
+    data_type_float = data_type(type="float")
+    data_type_str = data_type(type="str")
+    return {
+        # TODO: Should we support a special type such UUID?
+        Types.integer: data_type_int,
+        Types.int32: data_type_int,
+        Types.int64: data_type_int,
+        Types.number: data_type_float,
+        Types.float: data_type_float,
+        Types.double: data_type_float,
+        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
+        Types.time: data_type_str,
+        Types.string: data_type_str,
+        Types.byte: data_type_str,  # base64 encoded string
+        Types.binary: data_type(type="bytes"),
+        Types.date: data_type_str,
+        Types.date_time: data_type_str,
+        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
+        Types.password: data_type_str,
+        Types.email: data_type_str,
+        Types.uuid: data_type_str,
+        Types.uuid1: data_type_str,
+        Types.uuid2: data_type_str,
+        Types.uuid3: data_type_str,
+        Types.uuid4: data_type_str,
+        Types.uuid5: data_type_str,
+        Types.uri: data_type_str,
+        Types.hostname: data_type_str,
+        Types.ipv4: data_type_str,
+        Types.ipv6: data_type_str,
+        Types.ipv4_network: data_type_str,
+        Types.ipv6_network: data_type_str,
+        Types.boolean: data_type(type="bool"),
+        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
+        Types.null: data_type(type="None"),
+        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
+        Types.any: data_type.from_import(IMPORT_ANY),
+    }
+
+
+class DataTypeManager(_DataTypeManager):
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+        )
+
+        self.type_map: dict[Types, DataType] = type_map_factory(self.data_type)
+
+    def get_data_type(
+        self,
+        types: Types,
+        **_: Any,
+    ) -> DataType:
+        return self.type_map[types]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/union.py 0.34.0-1/src/datamodel_code_generator/model/union.py
--- 0.26.4-3/src/datamodel_code_generator/model/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/model/union.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, IMPORT_UNION, Import
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+class DataTypeUnion(DataModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Union.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (
+        IMPORT_TYPE_ALIAS,
+        IMPORT_UNION,
+    )
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/__init__.py 0.34.0-1/src/datamodel_code_generator/parser/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/parser/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/parser/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from collections import UserDict
+from enum import Enum
+from typing import Callable, TypeVar
+
+TK = TypeVar("TK")
+TV = TypeVar("TV")
+
+
+class LiteralType(Enum):
+    All = "all"
+    One = "one"
+
+
+class DefaultPutDict(UserDict[TK, TV]):
+    def get_or_put(
+        self,
+        key: TK,
+        default: TV | None = None,
+        default_factory: Callable[[TK], TV] | None = None,
+    ) -> TV:
+        if key in self:
+            return self[key]
+        if default:  # pragma: no cover
+            value = self[key] = default
+            return value
+        if default_factory:
+            value = self[key] = default_factory(key)
+            return value
+        msg = "Not found default and default_factory"
+        raise ValueError(msg)  # pragma: no cover
+
+
+__all__ = [
+    "DefaultPutDict",
+    "LiteralType",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/base.py 0.34.0-1/src/datamodel_code_generator/parser/base.py
--- 0.26.4-3/src/datamodel_code_generator/parser/base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/parser/base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,1418 @@
+from __future__ import annotations
+
+import operator
+import re
+import sys
+from abc import ABC, abstractmethod
+from collections import OrderedDict, defaultdict
+from itertools import groupby
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, cast, runtime_checkable
+from urllib.parse import ParseResult
+
+from pydantic import BaseModel
+
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    CodeFormatter,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.imports import (
+    IMPORT_ANNOTATIONS,
+    IMPORT_LITERAL,
+    Import,
+    Imports,
+)
+from datamodel_code_generator.model import dataclass as dataclass_model
+from datamodel_code_generator.model import msgspec as msgspec_model
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
+from datamodel_code_generator.model.base import (
+    ALL_MODEL,
+    UNDEFINED,
+    BaseClassDataType,
+    ConstraintsBase,
+    DataModel,
+    DataModelFieldBase,
+)
+from datamodel_code_generator.model.enum import Enum, Member
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.reference import ModelResolver, Reference
+from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+
# Marker template used to tag synthetic (parser-generated) path segments.
SPECIAL_PATH_FORMAT: str = "#-datamodel-code-generator-#-{}-#-special-#"


def get_special_path(keyword: str, path: list[str]) -> list[str]:
    """Return a new list: ``path`` plus a special marker segment for ``keyword``."""
    return path + [SPECIAL_PATH_FORMAT.format(keyword)]
+
+
# Translation table mapping characters that must be escaped inside generated
# single-quoted string literals to their backslash escape sequences.
escape_characters = str.maketrans({
    "\\": "\\\\",
    "'": "\\'",
    "\u0000": "\\x00",  # null byte
    "\b": "\\b",
    "\f": "\\f",
    "\n": "\\n",
    "\r": "\\r",
    "\t": "\\t",
})
+
+
def to_hashable(item: Any) -> Any:
    """Convert ``item`` into a hashable, order-insensitive representation.

    Sequences become sorted tuples, mappings become sorted ``(key, value)``
    tuples, sets become frozensets, pydantic models are reduced via their
    ``dict()`` form, and ``None`` is normalised to the empty string.
    Anything else is returned unchanged.
    """
    if isinstance(item, (list, tuple)):
        return tuple(sorted(to_hashable(element) for element in item))
    if isinstance(item, dict):
        pairs = ((key, to_hashable(value)) for key, value in item.items())
        return tuple(sorted(pairs))
    if isinstance(item, set):  # pragma: no cover
        return frozenset(to_hashable(element) for element in item)
    if isinstance(item, BaseModel):
        return to_hashable(item.dict())
    if item is None:
        return ""
    return item
+
+
def dump_templates(templates: list[DataModel]) -> str:
    """Render each model and join them with the two blank lines PEP 8 expects."""
    return "\n\n\n".join(map(str, templates))
+
+
# Maps a model path to the set of reference paths it depends on.
ReferenceMapSet = dict[str, set[str]]
# Models keyed by path, in dependency-resolved order.
SortedDataModels = dict[str, DataModel]

# Upper bound on sort_data_models' recursive resolution passes.
MAX_RECURSION_COUNT: int = sys.getrecursionlimit()
+
+
def sort_data_models(  # noqa: PLR0912
    unsorted_data_models: list[DataModel],
    sorted_data_models: SortedDataModels | None = None,
    require_update_action_models: list[str] | None = None,
    recursion_count: int = MAX_RECURSION_COUNT,
) -> tuple[list[DataModel], SortedDataModels, list[str]]:
    """Order models so each appears after the models it references.

    Returns a tuple of (models still unresolved, models sorted by dependency,
    paths of models needing a post-definition update action, e.g. forward-ref
    resolution). Self-referencing and circular models are accepted and recorded
    in ``require_update_action_models``; only genuinely unresolvable
    references raise.
    """
    if sorted_data_models is None:
        sorted_data_models = OrderedDict()
    if require_update_action_models is None:
        require_update_action_models = []
    sorted_model_count: int = len(sorted_data_models)

    unresolved_references: list[DataModel] = []
    for model in unsorted_data_models:
        if not model.reference_classes:
            sorted_data_models[model.path] = model
        elif model.path in model.reference_classes and len(model.reference_classes) == 1:  # only self-referencing
            sorted_data_models[model.path] = model
            require_update_action_models.append(model.path)
        elif (
            not model.reference_classes - {model.path} - set(sorted_data_models)
        ):  # reference classes have been resolved
            sorted_data_models[model.path] = model
            if model.path in model.reference_classes:
                require_update_action_models.append(model.path)
        else:
            unresolved_references.append(model)
    if unresolved_references:
        # Retry as long as the previous pass made progress; recursion_count
        # bounds the depth so pathological inputs terminate.
        if sorted_model_count != len(sorted_data_models) and recursion_count:
            try:
                return sort_data_models(
                    unresolved_references,
                    sorted_data_models,
                    require_update_action_models,
                    recursion_count - 1,
                )
            except RecursionError:  # pragma: no cover
                pass

        # sort on base_class dependency
        while True:
            ordered_models: list[tuple[int, DataModel]] = []
            unresolved_reference_model_names = [m.path for m in unresolved_references]
            for model in unresolved_references:
                indexes = [
                    unresolved_reference_model_names.index(b.reference.path)
                    for b in model.base_classes
                    if b.reference and b.reference.path in unresolved_reference_model_names
                ]
                if indexes:
                    ordered_models.append((
                        max(indexes),
                        model,
                    ))
                else:
                    ordered_models.append((
                        -1,
                        model,
                    ))
            # Stable bubble-to-fixpoint: repeat until the order stops changing.
            sorted_unresolved_models = [m[1] for m in sorted(ordered_models, key=operator.itemgetter(0))]
            if sorted_unresolved_models == unresolved_references:
                break
            unresolved_references = sorted_unresolved_models

        # circular reference
        unsorted_data_model_names = set(unresolved_reference_model_names)
        for model in unresolved_references:
            unresolved_model = model.reference_classes - {model.path} - set(sorted_data_models)
            base_models = [getattr(s.reference, "path", None) for s in model.base_classes]
            update_action_parent = set(require_update_action_models).intersection(base_models)
            if not unresolved_model:
                sorted_data_models[model.path] = model
                if update_action_parent:
                    require_update_action_models.append(model.path)
                continue
            if not unresolved_model - unsorted_data_model_names:
                # Every missing reference is part of this circular group.
                sorted_data_models[model.path] = model
                require_update_action_models.append(model.path)
                continue
            # unresolved
            unresolved_classes = ", ".join(
                f"[class: {item.path} references: {item.reference_classes}]" for item in unresolved_references
            )
            msg = f"A Parser can not resolve classes: {unresolved_classes}."
            raise Exception(msg)  # noqa: TRY002
    return unresolved_references, sorted_data_models, require_update_action_models
+
+
def relative(current_module: str, reference: str) -> tuple[str, str]:
    """Compute the ``(from, import)`` pair for referring to ``reference``
    from inside ``current_module``.

    Returns ``("", "")`` when both live in the same module, meaning no
    import is needed at all.
    """
    module_parts = current_module.split(".") if current_module else []
    *target_parts, target_name = reference.split(".")

    if module_parts == target_parts:
        # Same module: nothing to import.
        return "", ""

    # Length of the shared package prefix.
    common = 0
    for own, other in zip(module_parts, target_parts):
        if own != other:
            break
        common += 1

    left = "." * (len(module_parts) - common)
    right = ".".join(target_parts[common:])

    if not left:
        left = "."
    if not right:
        right = target_name
    elif "." in right:
        head, right = right.rsplit(".", 1)
        left += head

    return left, right
+
+
def exact_import(from_: str, import_: str, short_name: str) -> tuple[str, str]:
    """Build an exact ``from X import name`` pair for a module and symbol.

    When ``from_`` consists solely of dots (a purely relative parent such as
    "." or ".."), the module name is appended without a separating dot so
    "from . import foo" does not degrade into "from ..foo import Foo".
    """
    separator = "" if from_ == "." * len(from_) else "."
    return f"{from_}{separator}{import_}", short_name
+
+
@runtime_checkable
class Child(Protocol):
    """Structural type for objects exposing a ``parent`` attribute."""

    @property
    def parent(self) -> Any | None:
        raise NotImplementedError


T = TypeVar("T")


def get_most_of_parent(value: Any, type_: type[T] | None = None) -> T | None:
    """Walk the ``parent`` chain upwards from ``value``.

    Without ``type_`` the walk continues until an object without a parent
    attribute is reached; with ``type_`` it stops at the first ancestor that
    is an instance of that type.
    """
    # Iterative form of the original recursive walk.
    current = value
    while isinstance(current, Child) and (type_ is None or not isinstance(current, type_)):
        current = current.parent
    return current
+
+
def title_to_class_name(title: str) -> str:
    """Convert an arbitrary schema title into a PascalCase class name."""
    # Collapse every run of non-alphanumeric characters into one space,
    # title-case each word, then squeeze the spaces back out.
    words = re.sub(r"[^A-Za-z0-9]+", " ", title)
    return words.title().replace(" ", "")
+
+
+def _find_base_classes(model: DataModel) -> list[DataModel]:
+    return [b.reference.source for b in model.base_classes if b.reference and isinstance(b.reference.source, DataModel)]
+
+
+def _find_field(original_name: str, models: list[DataModel]) -> DataModelFieldBase | None:
+    def _find_field_and_base_classes(
+        model_: DataModel,
+    ) -> tuple[DataModelFieldBase | None, list[DataModel]]:
+        for field_ in model_.fields:
+            if field_.original_name == original_name:
+                return field_, []
+        return None, _find_base_classes(model_)  # pragma: no cover
+
+    for model in models:
+        field, base_models = _find_field_and_base_classes(model)
+        if field:
+            return field
+        models.extend(base_models)  # pragma: no cover  # noqa: B909
+
+    return None  # pragma: no cover
+
+
+def _copy_data_types(data_types: list[DataType]) -> list[DataType]:
+    copied_data_types: list[DataType] = []
+    for data_type_ in data_types:
+        if data_type_.reference:
+            copied_data_types.append(data_type_.__class__(reference=data_type_.reference))
+        elif data_type_.data_types:  # pragma: no cover
+            copied_data_type = data_type_.copy()
+            copied_data_type.data_types = _copy_data_types(data_type_.data_types)
+            copied_data_types.append(copied_data_type)
+        else:
+            copied_data_types.append(data_type_.copy())
+    return copied_data_types
+
+
class Result(BaseModel):
    """A rendered code body together with the optional file it belongs to."""

    body: str
    source: Optional[Path] = None  # noqa: UP045
+
+
class Source(BaseModel):
    """One input document: its path (relative to the base path) and raw text."""

    path: Path
    text: str

    @classmethod
    def from_path(cls, path: Path, base_path: Path, encoding: str) -> Source:
        """Read ``path`` with ``encoding`` and wrap it, keyed relative to ``base_path``."""
        relative_path = path.relative_to(base_path)
        return cls(path=relative_path, text=path.read_text(encoding=encoding))
+
+
+class Parser(ABC):
    def __init__(  # noqa: PLR0913, PLR0915
        self,
        source: str | Path | list[Path] | ParseResult,
        *,
        data_model_type: type[DataModel] = pydantic_model.BaseModel,
        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
        base_class: str | None = None,
        additional_imports: list[str] | None = None,
        custom_template_dir: Path | None = None,
        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
        target_python_version: PythonVersion = PythonVersionMin,
        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
        validation: bool = False,
        field_constraints: bool = False,
        snake_case_field: bool = False,
        strip_default_none: bool = False,
        aliases: Mapping[str, str] | None = None,
        allow_population_by_field_name: bool = False,
        apply_default_values_for_required_fields: bool = False,
        allow_extra_fields: bool = False,
        extra_fields: str | None = None,
        force_optional_for_required_fields: bool = False,
        class_name: str | None = None,
        use_standard_collections: bool = False,
        base_path: Path | None = None,
        use_schema_description: bool = False,
        use_field_description: bool = False,
        use_default_kwarg: bool = False,
        reuse_model: bool = False,
        encoding: str = "utf-8",
        enum_field_as_literal: LiteralType | None = None,
        set_default_enum_member: bool = False,
        use_subclass_enum: bool = False,
        strict_nullable: bool = False,
        use_generic_container_types: bool = False,
        enable_faux_immutability: bool = False,
        remote_text_cache: DefaultPutDict[str, str] | None = None,
        disable_appending_item_suffix: bool = False,
        strict_types: Sequence[StrictTypes] | None = None,
        empty_enum_field_name: str | None = None,
        custom_class_name_generator: Callable[[str], str] | None = title_to_class_name,
        field_extra_keys: set[str] | None = None,
        field_include_all_keys: bool = False,
        field_extra_keys_without_x_prefix: set[str] | None = None,
        wrap_string_literal: bool | None = None,
        use_title_as_name: bool = False,
        use_operation_id_as_name: bool = False,
        use_unique_items_as_set: bool = False,
        http_headers: Sequence[tuple[str, str]] | None = None,
        http_ignore_tls: bool = False,
        use_annotated: bool = False,
        use_non_positive_negative_number_constrained_types: bool = False,
        original_field_name_delimiter: str | None = None,
        use_double_quotes: bool = False,
        use_union_operator: bool = False,
        allow_responses_without_content: bool = False,
        collapse_root_models: bool = False,
        special_field_name_prefix: str | None = None,
        remove_special_field_name_prefix: bool = False,
        capitalise_enum_members: bool = False,
        keep_model_order: bool = False,
        use_one_literal_as_default: bool = False,
        known_third_party: list[str] | None = None,
        custom_formatters: list[str] | None = None,
        custom_formatters_kwargs: dict[str, Any] | None = None,
        use_pendulum: bool = False,
        http_query_parameters: Sequence[tuple[str, str]] | None = None,
        treat_dot_as_module: bool = False,
        use_exact_imports: bool = False,
        default_field_extras: dict[str, Any] | None = None,
        target_datetime_class: DatetimeClassType | None = None,
        keyword_only: bool = False,
        frozen_dataclasses: bool = False,
        no_alias: bool = False,
        formatters: list[Formatter] = DEFAULT_FORMATTERS,
        parent_scoped_naming: bool = False,
    ) -> None:
        """Configure the parser: input source, output model flavour, and every
        code-generation option.

        ``source`` may be a raw schema string, a file or directory path, a
        list of paths, or a parsed URL. Most remaining keyword arguments are
        stored on ``self`` (or folded into ``extra_template_data``) for use
        during parsing and rendering.

        NOTE(review): ``formatters`` defaults to the shared DEFAULT_FORMATTERS
        list object; callers should not mutate the received list in place.
        """
        self.keyword_only = keyword_only
        self.frozen_dataclasses = frozen_dataclasses
        # The type manager resolves schema types to Python type hints for the
        # selected target version / collection style.
        self.data_type_manager: DataTypeManager = data_type_manager_type(
            python_version=target_python_version,
            use_standard_collections=use_standard_collections,
            use_generic_container_types=use_generic_container_types,
            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
            strict_types=strict_types,
            use_union_operator=use_union_operator,
            use_pendulum=use_pendulum,
            target_datetime_class=target_datetime_class,
            treat_dot_as_module=treat_dot_as_module,
        )
        self.data_model_type: type[DataModel] = data_model_type
        self.data_model_root_type: type[DataModel] = data_model_root_type
        self.data_model_field_type: type[DataModelFieldBase] = data_model_field_type

        self.imports: Imports = Imports(use_exact_imports)
        self.use_exact_imports: bool = use_exact_imports
        self._append_additional_imports(additional_imports=additional_imports)

        self.base_class: str | None = base_class
        self.target_python_version: PythonVersion = target_python_version
        self.results: list[DataModel] = []
        self.dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = dump_resolve_reference_action
        self.validation: bool = validation
        self.field_constraints: bool = field_constraints
        self.snake_case_field: bool = snake_case_field
        self.strip_default_none: bool = strip_default_none
        self.apply_default_values_for_required_fields: bool = apply_default_values_for_required_fields
        self.force_optional_for_required_fields: bool = force_optional_for_required_fields
        self.use_schema_description: bool = use_schema_description
        self.use_field_description: bool = use_field_description
        self.use_default_kwarg: bool = use_default_kwarg
        self.reuse_model: bool = reuse_model
        self.encoding: str = encoding
        self.enum_field_as_literal: LiteralType | None = enum_field_as_literal
        self.set_default_enum_member: bool = set_default_enum_member
        self.use_subclass_enum: bool = use_subclass_enum
        self.strict_nullable: bool = strict_nullable
        self.use_generic_container_types: bool = use_generic_container_types
        self.use_union_operator: bool = use_union_operator
        self.enable_faux_immutability: bool = enable_faux_immutability
        self.custom_class_name_generator: Callable[[str], str] | None = custom_class_name_generator
        self.field_extra_keys: set[str] = field_extra_keys or set()
        self.field_extra_keys_without_x_prefix: set[str] = field_extra_keys_without_x_prefix or set()
        self.field_include_all_keys: bool = field_include_all_keys

        self.remote_text_cache: DefaultPutDict[str, str] = remote_text_cache or DefaultPutDict()
        self.current_source_path: Path | None = None
        self.use_title_as_name: bool = use_title_as_name
        self.use_operation_id_as_name: bool = use_operation_id_as_name
        self.use_unique_items_as_set: bool = use_unique_items_as_set

        # Resolve the base path used to relativise file inputs.
        if base_path:
            self.base_path = base_path
        elif isinstance(source, Path):
            self.base_path = source.absolute() if source.is_dir() else source.absolute().parent
        else:
            self.base_path = Path.cwd()

        self.source: str | Path | list[Path] | ParseResult = source
        self.custom_template_dir = custom_template_dir
        self.extra_template_data: defaultdict[str, Any] = extra_template_data or defaultdict(dict)

        # Model-wide template switches are stored under the ALL_MODEL key.
        if allow_population_by_field_name:
            self.extra_template_data[ALL_MODEL]["allow_population_by_field_name"] = True

        if allow_extra_fields:
            self.extra_template_data[ALL_MODEL]["allow_extra_fields"] = True

        if extra_fields:
            self.extra_template_data[ALL_MODEL]["extra_fields"] = extra_fields

        if enable_faux_immutability:
            self.extra_template_data[ALL_MODEL]["allow_mutation"] = False

        self.model_resolver = ModelResolver(
            base_url=source.geturl() if isinstance(source, ParseResult) else None,
            singular_name_suffix="" if disable_appending_item_suffix else None,
            aliases=aliases,
            empty_field_name=empty_enum_field_name,
            snake_case_field=snake_case_field,
            custom_class_name_generator=custom_class_name_generator,
            base_path=self.base_path,
            original_field_name_delimiter=original_field_name_delimiter,
            special_field_name_prefix=special_field_name_prefix,
            remove_special_field_name_prefix=remove_special_field_name_prefix,
            capitalise_enum_members=capitalise_enum_members,
            no_alias=no_alias,
            parent_scoped_naming=parent_scoped_naming,
        )
        self.class_name: str | None = class_name
        self.wrap_string_literal: bool | None = wrap_string_literal
        self.http_headers: Sequence[tuple[str, str]] | None = http_headers
        self.http_query_parameters: Sequence[tuple[str, str]] | None = http_query_parameters
        self.http_ignore_tls: bool = http_ignore_tls
        self.use_annotated: bool = use_annotated
        if self.use_annotated and not self.field_constraints:  # pragma: no cover
            msg = "`use_annotated=True` has to be used with `field_constraints=True`"
            raise Exception(msg)  # noqa: TRY002
        self.use_non_positive_negative_number_constrained_types = use_non_positive_negative_number_constrained_types
        self.use_double_quotes = use_double_quotes
        self.allow_responses_without_content = allow_responses_without_content
        self.collapse_root_models = collapse_root_models
        self.capitalise_enum_members = capitalise_enum_members
        self.keep_model_order = keep_model_order
        self.use_one_literal_as_default = use_one_literal_as_default
        self.known_third_party = known_third_party
        self.custom_formatter = custom_formatters
        self.custom_formatters_kwargs = custom_formatters_kwargs
        self.treat_dot_as_module = treat_dot_as_module
        self.default_field_extras: dict[str, Any] | None = default_field_extras
        self.formatters: list[Formatter] = formatters
+
    @property
    def iter_source(self) -> Iterator[Source]:
        """Yield each input document as a :class:`Source`.

        Handles the four accepted source kinds: a raw string, a single file
        or directory path, a list of paths, and a remote URL (fetched via the
        remote text cache).
        """
        if isinstance(self.source, str):
            yield Source(path=Path(), text=self.source)
        elif isinstance(self.source, Path):  # pragma: no cover
            if self.source.is_dir():
                # Sort by file name so generation order is deterministic.
                for path in sorted(self.source.rglob("*"), key=lambda p: p.name):
                    if path.is_file():
                        yield Source.from_path(path, self.base_path, self.encoding)
            else:
                yield Source.from_path(self.source, self.base_path, self.encoding)
        elif isinstance(self.source, list):  # pragma: no cover
            for path in self.source:
                yield Source.from_path(path, self.base_path, self.encoding)
        else:
            yield Source(
                path=Path(self.source.path),
                text=self.remote_text_cache.get_or_put(self.source.geturl(), default_factory=self._get_text_from_url),
            )
+
+    def _append_additional_imports(self, additional_imports: list[str] | None) -> None:
+        if additional_imports is None:
+            additional_imports = []
+
+        for additional_import_string in additional_imports:
+            if additional_import_string is None:
+                continue
+            new_import = Import.from_full_path(additional_import_string)
+            self.imports.append(new_import)
+
+    def _get_text_from_url(self, url: str) -> str:
+        from datamodel_code_generator.http import get_body  # noqa: PLC0415
+
+        return self.remote_text_cache.get_or_put(
+            url,
+            default_factory=lambda url_: get_body(  # noqa: ARG005
+                url, self.http_headers, self.http_ignore_tls, self.http_query_parameters
+            ),
+        )
+
+    @classmethod
+    def get_url_path_parts(cls, url: ParseResult) -> list[str]:
+        return [
+            f"{url.scheme}://{url.hostname}",
+            *url.path.split("/")[1:],
+        ]
+
    @property
    def data_type(self) -> type[DataType]:
        """The concrete DataType class provided by the data-type manager."""
        return self.data_type_manager.data_type
+
    @abstractmethod
    def parse_raw(self) -> None:
        """Parse the raw input into data models; implemented per input format."""
        raise NotImplementedError
+
    def __delete_duplicate_models(self, models: list[DataModel]) -> None:  # noqa: PLR0912
        """Remove models from ``models`` that duplicate another model.

        Two cases are handled: a custom root model that merely aliases another
        model in the list, and distinct models that render to identical code.
        References to a removed model are rewired to the surviving original.
        """
        model_class_names: dict[str, DataModel] = {}
        model_to_duplicate_models: defaultdict[DataModel, list[DataModel]] = defaultdict(list)
        for model in models.copy():  # noqa: PLR1702
            if isinstance(model, self.data_model_root_type):
                root_data_type = model.fields[0].data_type

                # backward compatible
                # Remove duplicated root model
                if (
                    root_data_type.reference
                    and not root_data_type.is_dict
                    and not root_data_type.is_list
                    and root_data_type.reference.source in models
                    and root_data_type.reference.name
                    == self.model_resolver.get_class_name(model.reference.original_name, unique=False).name
                ):
                    # Replace referenced duplicate model to original model
                    for child in model.reference.children[:]:
                        child.replace_reference(root_data_type.reference)
                    models.remove(model)
                    for data_type in model.all_data_types:
                        if data_type.reference:
                            data_type.remove_reference()
                    continue

                #  Custom root model can't be inherited on restriction of Pydantic
                for child in model.reference.children:
                    # inheritance model
                    if isinstance(child, DataModel):
                        for base_class in child.base_classes[:]:
                            if base_class.reference == model.reference:
                                child.base_classes.remove(base_class)
                        if not child.base_classes:  # pragma: no cover
                            child.set_base_class()

            class_name = model.duplicate_class_name or model.class_name
            if class_name in model_class_names:
                # Compare rendered code plus imports to decide true duplication.
                model_key = tuple(
                    to_hashable(v)
                    for v in (
                        model.render(class_name=model.duplicate_class_name),
                        model.imports,
                    )
                )
                original_model = model_class_names[class_name]
                original_model_key = tuple(
                    to_hashable(v)
                    for v in (
                        original_model.render(class_name=original_model.duplicate_class_name),
                        original_model.imports,
                    )
                )
                if model_key == original_model_key:
                    model_to_duplicate_models[original_model].append(model)
                    continue
            model_class_names[class_name] = model
        for model, duplicate_models in model_to_duplicate_models.items():
            for duplicate_model in duplicate_models:
                for child in duplicate_model.reference.children[:]:
                    if isinstance(child, DataType):
                        child.replace_reference(model.reference)
                    # simplify if introduce duplicate base classes
                    if isinstance(child, DataModel):
                        child.base_classes = list(
                            {f"{c.module_name}.{c.type_hint}": c for c in child.base_classes}.values()
                        )
                models.remove(duplicate_model)
+
    @classmethod
    def __replace_duplicate_name_in_module(cls, models: list[DataModel]) -> None:
        """Rename models whose class names collide within one module.

        Collisions get a "Model" suffix via the scoped resolver; afterwards, a
        model whose originally desired name became free again is renamed back
        to that desired name.
        """
        scoped_model_resolver = ModelResolver(
            exclude_names={i.alias or i.import_ for m in models for i in m.imports},
            duplicate_name_suffix="Model",
        )

        model_names: dict[str, DataModel] = {}
        for model in models:
            class_name: str = model.class_name
            generated_name: str = scoped_model_resolver.add([model.path], class_name, unique=True, class_name=True).name
            if class_name != generated_name:
                model.class_name = generated_name
            model_names[model.class_name] = model

        for model in models:
            duplicate_name = model.duplicate_class_name
            # check only first desired name
            if duplicate_name and duplicate_name not in model_names:
                del model_names[model.class_name]
                model.class_name = duplicate_name
                model_names[duplicate_name] = model
+
    def __change_from_import(
        self,
        models: list[DataModel],
        imports: Imports,
        scoped_model_resolver: ModelResolver,
        init: bool,  # noqa: FBT001
    ) -> None:
        """Rewrite cross-module model references into relative from-imports.

        ``init`` indicates the module being generated is an ``__init__.py``,
        which needs an extra leading dot on its relative imports.
        """
        for model in models:
            scoped_model_resolver.add([model.path], model.class_name)
        for model in models:
            before_import = model.imports
            imports.append(before_import)
            for data_type in model.all_data_types:
                # To change from/import

                if not data_type.reference or data_type.reference.source in models:
                    # No need to import non-reference model.
                    # Or, Referenced model is in the same file. we don't need to import the model
                    continue

                if isinstance(data_type, BaseClassDataType):
                    left, right = relative(model.module_name, data_type.full_name)
                    from_ = f"{left}{right}" if left.endswith(".") else f"{left}.{right}"
                    import_ = data_type.reference.short_name
                    full_path = from_, import_
                else:
                    from_, import_ = full_path = relative(model.module_name, data_type.full_name)
                    if imports.use_exact:  # pragma: no cover
                        from_, import_ = exact_import(from_, import_, data_type.reference.short_name)
                    import_ = import_.replace("-", "_")
                    if (
                        len(model.module_path) > 1
                        and model.module_path[-1].count(".") > 0
                        and not self.treat_dot_as_module
                    ):
                        # Dotted file names add phantom relative depth; trim
                        # the corresponding number of leading dots.
                        rel_path_depth = model.module_path[-1].count(".")
                        from_ = from_[rel_path_depth:]

                alias = scoped_model_resolver.add(full_path, import_).name

                name = data_type.reference.short_name
                if from_ and import_ and alias != name:
                    data_type.alias = alias if data_type.reference.short_name == import_ else f"{alias}.{name}"

                if init:
                    from_ = "." + from_
                imports.append(
                    Import(
                        from_=from_,
                        import_=import_,
                        alias=alias,
                        reference_path=data_type.reference.path,
                    ),
                )
            after_import = model.imports
            if before_import != after_import:
                imports.append(after_import)
+
    @classmethod
    def __extract_inherited_enum(cls, models: list[DataModel]) -> None:
        """Flatten field-less models that only inherit from enum models.

        Such a model is replaced in place (same list position and reference)
        by a new enum combining the members of all of its enum base classes.
        """
        for model in models.copy():
            if model.fields:
                continue
            enums: list[Enum] = []
            for base_model in model.base_classes:
                if not base_model.reference:
                    continue
                source_model = base_model.reference.source
                if isinstance(source_model, Enum):
                    enums.append(source_model)
            if enums:
                models.insert(
                    models.index(model),
                    enums[0].__class__(
                        fields=[f for e in enums for f in e.fields],
                        description=model.description,
                        reference=model.reference,
                    ),
                )
                models.remove(model)
+
+    def __apply_discriminator_type(  # noqa: PLR0912, PLR0915
+        self,
+        models: list[DataModel],
+        imports: Imports,
+    ) -> None:
+        for model in models:  # noqa: PLR1702
+            for field in model.fields:
+                discriminator = field.extras.get("discriminator")
+                if not discriminator or not isinstance(discriminator, dict):
+                    continue
+                property_name = discriminator.get("propertyName")
+                if not property_name:  # pragma: no cover
+                    continue
+                field_name, alias = self.model_resolver.get_valid_field_name_and_alias(field_name=property_name)
+                discriminator["propertyName"] = field_name
+                mapping = discriminator.get("mapping", {})
+                for data_type in field.data_type.data_types:
+                    if not data_type.reference:  # pragma: no cover
+                        continue
+                    discriminator_model = data_type.reference.source
+
+                    if not isinstance(  # pragma: no cover
+                        discriminator_model,
+                        (
+                            pydantic_model.BaseModel,
+                            pydantic_model_v2.BaseModel,
+                            dataclass_model.DataClass,
+                            msgspec_model.Struct,
+                        ),
+                    ):
+                        continue  # pragma: no cover
+
+                    type_names: list[str] = []
+
+                    def check_paths(
+                        model: pydantic_model.BaseModel | pydantic_model_v2.BaseModel | Reference,
+                        mapping: dict[str, str],
+                        type_names: list[str] = type_names,
+                    ) -> None:
+                        """Helper function to validate paths for a given model."""
+                        for name, path in mapping.items():
+                            if (model.path.split("#/")[-1] != path.split("#/")[-1]) and (
+                                path.startswith("#/") or model.path[:-1] != path.split("/")[-1]
+                            ):
+                                t_path = path[str(path).find("/") + 1 :]
+                                t_disc = model.path[: str(model.path).find("#")].lstrip("../")  # noqa: B005
+                                t_disc_2 = "/".join(t_disc.split("/")[1:])
+                                if t_path not in {t_disc, t_disc_2}:
+                                    continue
+                            type_names.append(name)
+
+                    # First try to get the discriminator value from the const field
+                    for discriminator_field in discriminator_model.fields:
+                        if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                            continue
+                        if discriminator_field.extras.get("const"):
+                            type_names = [discriminator_field.extras["const"]]
+                            break
+
+                    # If no const value found, try to get it from the mapping
+                    if not type_names:
+                        # Check the main discriminator model path
+                        if mapping:
+                            check_paths(discriminator_model, mapping)  # pyright: ignore[reportArgumentType]
+
+                            # Check the base_classes if they exist
+                            if len(type_names) == 0:
+                                for base_class in discriminator_model.base_classes:
+                                    check_paths(base_class.reference, mapping)  # pyright: ignore[reportArgumentType]
+                        else:
+                            type_names = [discriminator_model.path.split("/")[-1]]
+
+                    if not type_names:  # pragma: no cover
+                        msg = f"Discriminator type is not found. {data_type.reference.path}"
+                        raise RuntimeError(msg)
+
+                    has_one_literal = False
+                    for discriminator_field in discriminator_model.fields:
+                        if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                            continue
+                        literals = discriminator_field.data_type.literals
+                        if len(literals) == 1 and literals[0] == (type_names[0] if type_names else None):
+                            has_one_literal = True
+                            if isinstance(discriminator_model, msgspec_model.Struct):  # pragma: no cover
+                                discriminator_model.add_base_class_kwarg("tag_field", f"'{field_name}'")
+                                discriminator_model.add_base_class_kwarg("tag", discriminator_field.represented_default)
+                                discriminator_field.extras["is_classvar"] = True
+                            # Found the discriminator field, no need to keep looking
+                            break
+                        for field_data_type in discriminator_field.data_type.all_data_types:
+                            if field_data_type.reference:  # pragma: no cover
+                                field_data_type.remove_reference()
+                        discriminator_field.data_type = self.data_type(literals=type_names)
+                        discriminator_field.data_type.parent = discriminator_field
+                        discriminator_field.required = True
+                        imports.append(discriminator_field.imports)
+                        has_one_literal = True
+                    if not has_one_literal:
+                        discriminator_model.fields.append(
+                            self.data_model_field_type(
+                                name=field_name,
+                                data_type=self.data_type(literals=type_names),
+                                required=True,
+                                alias=alias,
+                            )
+                        )
+                    has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)
+                    if has_imported_literal:  # pragma: no cover
+                        imports.append(IMPORT_LITERAL)
+
+    @classmethod
+    def _create_set_from_list(cls, data_type: DataType) -> DataType | None:
+        if data_type.is_list:
+            new_data_type = data_type.copy()
+            new_data_type.is_list = False
+            new_data_type.is_set = True
+            for data_type_ in new_data_type.data_types:
+                data_type_.parent = new_data_type
+            return new_data_type
+        if data_type.data_types:  # pragma: no cover
+            for index, nested_data_type in enumerate(data_type.data_types[:]):
+                set_data_type = cls._create_set_from_list(nested_data_type)
+                if set_data_type:  # pragma: no cover
+                    data_type.data_types[index] = set_data_type
+            return data_type
+        return None  # pragma: no cover
+
+    def __replace_unique_list_to_set(self, models: list[DataModel]) -> None:
+        for model in models:
+            for model_field in model.fields:
+                if not self.use_unique_items_as_set:
+                    continue
+
+                if not (model_field.constraints and model_field.constraints.unique_items):
+                    continue
+                set_data_type = self._create_set_from_list(model_field.data_type)
+                if set_data_type:  # pragma: no cover
+                    model_field.data_type.parent = None
+                    model_field.data_type = set_data_type
+                    set_data_type.parent = model_field
+
+    @classmethod
+    def __set_reference_default_value_to_field(cls, models: list[DataModel]) -> None:
+        for model in models:
+            for model_field in model.fields:
+                if not model_field.data_type.reference or model_field.has_default:
+                    continue
+                if (
+                    isinstance(model_field.data_type.reference.source, DataModel)
+                    and model_field.data_type.reference.source.default != UNDEFINED
+                ):
+                    # pragma: no cover
+                    model_field.default = model_field.data_type.reference.source.default
+
+    def __reuse_model(self, models: list[DataModel], require_update_action_models: list[str]) -> None:
+        if not self.reuse_model:
+            return
+        model_cache: dict[tuple[str, ...], Reference] = {}
+        duplicates = []
+        for model in models.copy():
+            model_key = tuple(to_hashable(v) for v in (model.render(class_name="M"), model.imports))
+            cached_model_reference = model_cache.get(model_key)
+            if cached_model_reference:
+                if isinstance(model, Enum):
+                    for child in model.reference.children[:]:
+                        # child is resolved data_type by reference
+                        data_model = get_most_of_parent(child)
+                        # TODO: replace reference in all modules
+                        if data_model in models:  # pragma: no cover
+                            child.replace_reference(cached_model_reference)
+                    duplicates.append(model)
+                else:
+                    index = models.index(model)
+                    inherited_model = model.__class__(
+                        fields=[],
+                        base_classes=[cached_model_reference],
+                        description=model.description,
+                        reference=Reference(
+                            name=model.name,
+                            path=model.reference.path + "/reuse",
+                        ),
+                        custom_template_dir=model._custom_template_dir,  # noqa: SLF001
+                    )
+                    if cached_model_reference.path in require_update_action_models:
+                        require_update_action_models.append(inherited_model.path)
+                    models.insert(index, inherited_model)
+                    models.remove(model)
+
+            else:
+                model_cache[model_key] = model.reference
+
+        for duplicate in duplicates:
+            models.remove(duplicate)
+
+    def __collapse_root_models(  # noqa: PLR0912
+        self,
+        models: list[DataModel],
+        unused_models: list[DataModel],
+        imports: Imports,
+        scoped_model_resolver: ModelResolver,
+    ) -> None:
+        if not self.collapse_root_models:
+            return
+
+        for model in models:  # noqa: PLR1702
+            for model_field in model.fields:
+                for data_type in model_field.data_type.all_data_types:
+                    reference = data_type.reference
+                    if not reference or not isinstance(reference.source, self.data_model_root_type):
+                        # If the data type is not a reference, we can't collapse it.
+                        # If it's a reference to a root model type, we don't do anything.
+                        continue
+
+                    # Use root-type as model_field type
+                    root_type_model = reference.source
+                    root_type_field = root_type_model.fields[0]
+
+                    if (
+                        self.field_constraints
+                        and isinstance(root_type_field.constraints, ConstraintsBase)
+                        and root_type_field.constraints.has_constraints
+                        and any(d for d in model_field.data_type.all_data_types if d.is_dict or d.is_union or d.is_list)
+                    ):
+                        continue  # pragma: no cover
+
+                    if root_type_field.data_type.reference:
+                        # If the root type field is a reference, we aren't able to collapse it yet.
+                        continue
+
+                    # set copied data_type
+                    copied_data_type = root_type_field.data_type.copy()
+                    if isinstance(data_type.parent, self.data_model_field_type):
+                        # for field
+                        # override empty field by root-type field
+                        model_field.extras = {
+                            **root_type_field.extras,
+                            **model_field.extras,
+                        }
+                        model_field.process_const()
+
+                        if self.field_constraints:
+                            model_field.constraints = ConstraintsBase.merge_constraints(
+                                root_type_field.constraints, model_field.constraints
+                            )
+
+                        data_type.parent.data_type = copied_data_type
+
+                    elif data_type.parent is not None and data_type.parent.is_list:
+                        if self.field_constraints:
+                            model_field.constraints = ConstraintsBase.merge_constraints(
+                                root_type_field.constraints, model_field.constraints
+                            )
+                        if (
+                            isinstance(
+                                root_type_field,
+                                pydantic_model.DataModelField,
+                            )
+                            and not model_field.extras.get("discriminator")
+                            and not any(t.is_list for t in model_field.data_type.data_types)
+                        ):
+                            discriminator = root_type_field.extras.get("discriminator")
+                            if discriminator:
+                                model_field.extras["discriminator"] = discriminator
+                        assert isinstance(data_type.parent, DataType)
+                        data_type.parent.data_types.remove(data_type)  # pragma: no cover
+                        data_type.parent.data_types.append(copied_data_type)
+
+                    elif isinstance(data_type.parent, DataType):
+                        # for data_type
+                        data_type_id = id(data_type)
+                        data_type.parent.data_types = [
+                            d for d in (*data_type.parent.data_types, copied_data_type) if id(d) != data_type_id
+                        ]
+                    else:  # pragma: no cover
+                        continue
+
+                    for d in root_type_field.data_type.data_types:
+                        if d.reference is None:
+                            continue
+                        from_, import_ = full_path = relative(model.module_name, d.full_name)
+                        if from_ and import_:
+                            alias = scoped_model_resolver.add(full_path, import_)
+                            d.alias = (
+                                alias.name
+                                if d.reference.short_name == import_
+                                else f"{alias.name}.{d.reference.short_name}"
+                            )
+                            imports.append([
+                                Import(
+                                    from_=from_,
+                                    import_=import_,
+                                    alias=alias.name,
+                                    reference_path=d.reference.path,
+                                )
+                            ])
+
+                    original_field = get_most_of_parent(data_type, DataModelFieldBase)
+                    if original_field:  # pragma: no cover
+                        # TODO: Improve detection of reference type
+                        imports.append(original_field.imports)
+
+                    data_type.remove_reference()
+
+                    root_type_model.reference.children = [
+                        c for c in root_type_model.reference.children if getattr(c, "parent", None)
+                    ]
+
+                    imports.remove_referenced_imports(root_type_model.path)
+                    if not root_type_model.reference.children:
+                        unused_models.append(root_type_model)
+
+    def __set_default_enum_member(  # noqa: PLR0912
+        self,
+        models: list[DataModel],
+    ) -> None:
+        if not self.set_default_enum_member:
+            return
+        for model in models:  # noqa: PLR1702
+            for model_field in model.fields:
+                if not model_field.default:
+                    continue
+                for data_type in model_field.data_type.all_data_types:
+                    if data_type.reference and isinstance(data_type.reference.source, Enum):  # pragma: no cover
+                        if isinstance(model_field.default, list):
+                            enum_member: list[Member] | (Member | None) = [
+                                e for e in (data_type.reference.source.find_member(d) for d in model_field.default) if e
+                            ]
+                        else:
+                            enum_member = data_type.reference.source.find_member(model_field.default)
+                        if not enum_member:
+                            continue
+                        model_field.default = enum_member
+                        if data_type.alias:
+                            if isinstance(enum_member, list):
+                                for enum_member_ in enum_member:
+                                    enum_member_.alias = data_type.alias
+                            else:
+                                enum_member.alias = data_type.alias
+
+    def __override_required_field(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        for model in models:
+            if isinstance(model, (Enum, self.data_model_root_type)):
+                continue
+            for index, model_field in enumerate(model.fields[:]):
+                data_type = model_field.data_type
+                if (
+                    not model_field.original_name  # noqa: PLR0916
+                    or data_type.data_types
+                    or data_type.reference
+                    or data_type.type
+                    or data_type.literals
+                    or data_type.dict_key
+                ):
+                    continue
+
+                original_field = _find_field(model_field.original_name, _find_base_classes(model))
+                if not original_field:  # pragma: no cover
+                    model.fields.remove(model_field)
+                    continue
+                copied_original_field = original_field.copy()
+                if original_field.data_type.reference:
+                    data_type = self.data_type_manager.data_type(
+                        reference=original_field.data_type.reference,
+                    )
+                elif original_field.data_type.data_types:
+                    data_type = original_field.data_type.copy()
+                    data_type.data_types = _copy_data_types(original_field.data_type.data_types)
+                    for data_type_ in data_type.data_types:
+                        data_type_.parent = data_type
+                else:
+                    data_type = original_field.data_type.copy()
+                data_type.parent = copied_original_field
+                copied_original_field.data_type = data_type
+                copied_original_field.parent = model
+                copied_original_field.required = True
+                model.fields.insert(index, copied_original_field)
+                model.fields.remove(model_field)
+
+    def __sort_models(
+        self,
+        models: list[DataModel],
+        imports: Imports,
+    ) -> None:
+        if not self.keep_model_order:
+            return
+
+        models.sort(key=lambda x: x.class_name)
+
+        imported = {i for v in imports.values() for i in v}
+        model_class_name_baseclasses: dict[DataModel, tuple[str, set[str]]] = {}
+        for model in models:
+            class_name = model.class_name
+            model_class_name_baseclasses[model] = (
+                class_name,
+                {b.type_hint for b in model.base_classes if b.reference} - {class_name},
+            )
+
+        changed: bool = True
+        while changed:
+            changed = False
+            resolved = imported.copy()
+            for i in range(len(models) - 1):
+                model = models[i]
+                class_name, baseclasses = model_class_name_baseclasses[model]
+                if not baseclasses - resolved:
+                    resolved.add(class_name)
+                    continue
+                models[i], models[i + 1] = models[i + 1], model
+                changed = True
+
+    def __change_field_name(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        if self.data_model_type != pydantic_model_v2.BaseModel:
+            return
+        for model in models:
+            if "Enum" in model.base_class:
+                continue
+
+            for field in model.fields:
+                filed_name = field.name
+                filed_name_resolver = ModelResolver(snake_case_field=self.snake_case_field, remove_suffix_number=True)
+                for data_type in field.data_type.all_data_types:
+                    if data_type.reference:
+                        filed_name_resolver.exclude_names.add(data_type.reference.short_name)
+                new_filed_name = filed_name_resolver.add(["field"], cast("str", filed_name)).name
+                if filed_name != new_filed_name:
+                    field.alias = filed_name
+                    field.name = new_filed_name
+
+    def __set_one_literal_on_default(self, models: list[DataModel]) -> None:
+        if not self.use_one_literal_as_default:
+            return
+        for model in models:
+            for model_field in model.fields:
+                if not model_field.required or len(model_field.data_type.literals) != 1:
+                    continue
+                model_field.default = model_field.data_type.literals[0]
+                model_field.required = False
+                if model_field.nullable is not True:  # pragma: no cover
+                    model_field.nullable = False
+
+    @classmethod
+    def __postprocess_result_modules(cls, results: dict[tuple[str, ...], Result]) -> dict[tuple[str, ...], Result]:
+        def process(input_tuple: tuple[str, ...]) -> tuple[str, ...]:
+            r = []
+            for item in input_tuple:
+                p = item.split(".")
+                if len(p) > 1:
+                    r.extend(p[:-1])
+                    r.append(p[-1])
+                else:
+                    r.append(item)
+
+            r = [*r[:-2], f"{r[-2]}.{r[-1]}"]
+            return tuple(r)
+
+        results = {process(k): v for k, v in results.items()}
+
+        init_result = next(v for k, v in results.items() if k[-1] == "__init__.py")
+        folders = {t[:-1] if t[-1].endswith(".py") else t for t in results}
+        for folder in folders:
+            for i in range(len(folder)):
+                subfolder = folder[: i + 1]
+                init_file = (*subfolder, "__init__.py")
+                results.update({init_file: init_result})
+        return results
+
+    def __change_imported_model_name(  # noqa: PLR6301
+        self,
+        models: list[DataModel],
+        imports: Imports,
+        scoped_model_resolver: ModelResolver,
+    ) -> None:
+        imported_names = {
+            imports.alias[from_][i] if i in imports.alias[from_] and i != imports.alias[from_][i] else i
+            for from_, import_ in imports.items()
+            for i in import_
+        }
+        for model in models:
+            if model.class_name not in imported_names:  # pragma: no cover
+                continue
+
+            model.reference.name = scoped_model_resolver.add(  # pragma: no cover
+                path=get_special_path("imported_name", model.path.split("/")),
+                original_name=model.reference.name,
+                unique=True,
+                class_name=True,
+            ).name
+
+    def __alias_shadowed_imports(  # noqa: PLR6301
+        self,
+        models: list[DataModel],
+        all_model_field_names: set[str],
+    ) -> None:
+        for model in models:
+            for model_field in model.fields:
+                if (
+                    model_field.data_type.type in all_model_field_names
+                    and model_field.data_type.type == model_field.name
+                ):
+                    alias = model_field.data_type.type + "_aliased"
+                    model_field.data_type.type = alias
+                    if model_field.data_type.import_:  # pragma: no cover
+                        model_field.data_type.import_ = Import(
+                            from_=model_field.data_type.import_.from_,
+                            import_=model_field.data_type.import_.import_,
+                            alias=alias,
+                            reference_path=model_field.data_type.import_.reference_path,
+                        )
+
+    def parse(  # noqa: PLR0912, PLR0914, PLR0915
+        self,
+        with_import: bool | None = True,  # noqa: FBT001, FBT002
+        format_: bool | None = True,  # noqa: FBT001, FBT002
+        settings_path: Path | None = None,
+        disable_future_imports: bool = False,  # noqa: FBT001, FBT002
+    ) -> str | dict[tuple[str, ...], Result]:
+        self.parse_raw()
+
+        if with_import and not disable_future_imports:
+            self.imports.append(IMPORT_ANNOTATIONS)
+
+        if format_:
+            code_formatter: CodeFormatter | None = CodeFormatter(
+                self.target_python_version,
+                settings_path,
+                self.wrap_string_literal,
+                skip_string_normalization=not self.use_double_quotes,
+                known_third_party=self.known_third_party,
+                custom_formatters=self.custom_formatter,
+                custom_formatters_kwargs=self.custom_formatters_kwargs,
+                encoding=self.encoding,
+                formatters=self.formatters,
+            )
+        else:
+            code_formatter = None
+
+        _, sorted_data_models, require_update_action_models = sort_data_models(self.results)
+
+        results: dict[tuple[str, ...], Result] = {}
+
+        def module_key(data_model: DataModel) -> tuple[str, ...]:
+            return tuple(data_model.module_path)
+
+        def sort_key(data_model: DataModel) -> tuple[int, tuple[str, ...]]:
+            return (len(data_model.module_path), tuple(data_model.module_path))
+
+        # process in reverse order to correctly establish module levels
+        grouped_models = groupby(
+            sorted(sorted_data_models.values(), key=sort_key, reverse=True),
+            key=module_key,
+        )
+
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]] = []
+        unused_models: list[DataModel] = []
+        model_to_module_models: dict[DataModel, tuple[tuple[str, ...], list[DataModel]]] = {}
+        module_to_import: dict[tuple[str, ...], Imports] = {}
+
+        previous_module: tuple[str, ...] = ()
+        for module, models in ((k, [*v]) for k, v in grouped_models):
+            for model in models:
+                model_to_module_models[model] = module, models
+            self.__delete_duplicate_models(models)
+            self.__replace_duplicate_name_in_module(models)
+            if len(previous_module) - len(module) > 1:
+                module_models.extend(
+                    (
+                        previous_module[:parts],
+                        [],
+                    )
+                    for parts in range(len(previous_module) - 1, len(module), -1)
+                )
+            module_models.append((
+                module,
+                models,
+            ))
+            previous_module = module
+
+        class Processed(NamedTuple):
+            module: tuple[str, ...]
+            models: list[DataModel]
+            init: bool
+            imports: Imports
+            scoped_model_resolver: ModelResolver
+
+        processed_models: list[Processed] = []
+
+        for module_, models in module_models:
+            imports = module_to_import[module_] = Imports(self.use_exact_imports)
+            init = False
+            if module_:
+                parent = (*module_[:-1], "__init__.py")
+                if parent not in results:
+                    results[parent] = Result(body="")
+                if (*module_, "__init__.py") in results:
+                    module = (*module_, "__init__.py")
+                    init = True
+                else:
+                    module = tuple(part.replace("-", "_") for part in (*module_[:-1], f"{module_[-1]}.py"))
+            else:
+                module = ("__init__.py",)
+
+            all_module_fields = {field.name for model in models for field in model.fields if field.name is not None}
+            scoped_model_resolver = ModelResolver(exclude_names=all_module_fields)
+
+            self.__alias_shadowed_imports(models, all_module_fields)
+            self.__override_required_field(models)
+            self.__replace_unique_list_to_set(models)
+            self.__change_from_import(models, imports, scoped_model_resolver, init)
+            self.__extract_inherited_enum(models)
+            self.__set_reference_default_value_to_field(models)
+            self.__reuse_model(models, require_update_action_models)
+            self.__collapse_root_models(models, unused_models, imports, scoped_model_resolver)
+            self.__set_default_enum_member(models)
+            self.__sort_models(models, imports)
+            self.__change_field_name(models)
+            self.__apply_discriminator_type(models, imports)
+            self.__set_one_literal_on_default(models)
+
+            processed_models.append(Processed(module, models, init, imports, scoped_model_resolver))
+
+        for processed_model in processed_models:
+            for model in processed_model.models:
+                processed_model.imports.append(model.imports)
+
+        for unused_model in unused_models:
+            module, models = model_to_module_models[unused_model]
+            if unused_model in models:  # pragma: no cover
+                imports = module_to_import[module]
+                imports.remove(unused_model.imports)
+                models.remove(unused_model)
+
+        for processed_model in processed_models:
+            # postprocess imports to remove unused imports.
+            model_code = str("\n".join([str(m) for m in processed_model.models]))
+            unused_imports = [
+                (from_, import_)
+                for from_, imports_ in processed_model.imports.items()
+                for import_ in imports_
+                if import_ not in model_code
+            ]
+            for from_, import_ in unused_imports:
+                processed_model.imports.remove(Import(from_=from_, import_=import_))
+
+        for module, models, init, imports, scoped_model_resolver in processed_models:  # noqa: B007
+            # process after removing unused models
+            self.__change_imported_model_name(models, imports, scoped_model_resolver)
+
+        for module, models, init, imports, scoped_model_resolver in processed_models:  # noqa: B007
+            result: list[str] = []
+            if models:
+                if with_import:
+                    result += [str(self.imports), str(imports), "\n"]
+
+                code = dump_templates(models)
+                result += [code]
+
+                if self.dump_resolve_reference_action is not None:
+                    result += [
+                        "\n",
+                        self.dump_resolve_reference_action(
+                            m.reference.short_name for m in models if m.path in require_update_action_models
+                        ),
+                    ]
+            if not result and not init:
+                continue
+            body = "\n".join(result)
+            if code_formatter:
+                body = code_formatter.format_code(body)
+
+            results[module] = Result(body=body, source=models[0].file_path if models else None)
+
+        # retain existing behaviour
+        if [*results] == [("__init__.py",)]:
+            return results["__init__.py",].body
+
+        results = {tuple(i.replace("-", "_") for i in k): v for k, v in results.items()}
+        return (
+            self.__postprocess_result_modules(results)
+            if self.treat_dot_as_module
+            else {
+                tuple((part[: part.rfind(".")].replace(".", "_") + part[part.rfind(".") :]) for part in k): v
+                for k, v in results.items()
+            }
+        )
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/graphql.py 0.34.0-1/src/datamodel_code_generator/parser/graphql.py
--- 0.26.4-3/src/datamodel_code_generator/parser/graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/parser/graphql.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,530 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+)
+from urllib.parse import ParseResult
+
+from datamodel_code_generator import (
+    DefaultPutDict,
+    LiteralType,
+    PythonVersion,
+    PythonVersionMin,
+    snooper_to_methods,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.model.enum import Enum
+from datamodel_code_generator.model.scalar import DataTypeScalar
+from datamodel_code_generator.model.union import DataTypeUnion
+from datamodel_code_generator.parser.base import (
+    DataType,
+    Parser,
+    Source,
+    escape_characters,
+)
+from datamodel_code_generator.reference import ModelType, Reference
+from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
+
+try:
+    import graphql
+except ImportError as exc:  # pragma: no cover
+    msg = "Please run `$ pip install 'datamodel-code-generator[graphql]'` to generate data-model from a GraphQL schema."
+    raise Exception(msg) from exc  # noqa: TRY002
+
+
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+
+graphql_resolver = graphql.type.introspection.TypeResolvers()
+
+
+def build_graphql_schema(schema_str: str) -> graphql.GraphQLSchema:
+    """Build a graphql schema from a string."""
+    schema = graphql.build_schema(schema_str)
+    return graphql.lexicographic_sort_schema(schema)
+
+
+@snooper_to_methods()
+class GraphQLParser(Parser):
+    # raw graphql schema as `graphql-core` object
+    raw_obj: graphql.GraphQLSchema
+    # all processed graphql objects
+    # mapper from an object name (unique) to an object
+    all_graphql_objects: dict[str, graphql.GraphQLNamedType]
+    # a reference for each object
+    # mapper from an object name to its reference
+    references: dict[str, Reference] = {}  # noqa: RUF012
+    # mapper from graphql type to all objects with this type
+    # `graphql.type.introspection.TypeKind` -- an enum with all supported types
+    # `graphql.GraphQLNamedType` -- base type for each graphql object
+    # see `graphql-core` for more details
+    support_graphql_types: dict[graphql.type.introspection.TypeKind, list[graphql.GraphQLNamedType]]
+    # graphql types order for render
+    # may become a parameter in the future
+    parse_order: list[graphql.type.introspection.TypeKind] = [  # noqa: RUF012
+        graphql.type.introspection.TypeKind.SCALAR,
+        graphql.type.introspection.TypeKind.ENUM,
+        graphql.type.introspection.TypeKind.INTERFACE,
+        graphql.type.introspection.TypeKind.OBJECT,
+        graphql.type.introspection.TypeKind.INPUT_OBJECT,
+        graphql.type.introspection.TypeKind.UNION,
+    ]
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_model_scalar_type: type[DataModel] = DataTypeScalar,
+        data_model_union_type: type[DataModel] = DataTypeUnion,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        apply_default_values_for_required_fields: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        use_one_literal_as_default: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+    ) -> None:
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+        )
+
+        self.data_model_scalar_type = data_model_scalar_type
+        self.data_model_union_type = data_model_union_type
+        self.use_standard_collections = use_standard_collections
+        self.use_union_operator = use_union_operator
+
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
+        # TODO (denisart): Temporarily this method duplicates
+        # the method `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
+
+        if isinstance(self.source, list) or (  # pragma: no cover
+            isinstance(self.source, Path) and self.source.is_dir()
+        ):  # pragma: no cover
+            self.current_source_path = Path()
+            self.model_resolver.after_load_files = {
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
+            }
+
+        for source in self.iter_source:
+            if isinstance(self.source, ParseResult):  # pragma: no cover
+                path_parts = self.get_url_path_parts(self.source)
+            else:
+                path_parts = list(source.path.parts)
+            if self.current_source_path is not None:  # pragma: no cover
+                self.current_source_path = source.path
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                yield source, path_parts
+
+    def _resolve_types(self, paths: list[str], schema: graphql.GraphQLSchema) -> None:
+        for type_name, type_ in schema.type_map.items():
+            if type_name.startswith("__"):
+                continue
+
+            if type_name in {"Query", "Mutation"}:
+                continue
+
+            resolved_type = graphql_resolver.kind(type_, None)
+
+            if resolved_type in self.support_graphql_types:  # pragma: no cover
+                self.all_graphql_objects[type_.name] = type_
+                # TODO: need a special method for each graph type
+                self.references[type_.name] = Reference(
+                    path=f"{paths!s}/{resolved_type.value}/{type_.name}",
+                    name=type_.name,
+                    original_name=type_.name,
+                )
+
+                self.support_graphql_types[resolved_type].append(type_)
+
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with conditional frozen parameter for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            kwargs["frozen"] = self.frozen_dataclasses
+        return data_model_class(**kwargs)
+
+    def _typename_field(self, name: str) -> DataModelFieldBase:
+        return self.data_model_field_type(
+            name="typename__",
+            data_type=DataType(
+                literals=[name],
+                use_union_operator=self.use_union_operator,
+                use_standard_collections=self.use_standard_collections,
+            ),
+            default=name,
+            use_annotated=self.use_annotated,
+            required=False,
+            alias="__typename",
+            use_one_literal_as_default=True,
+            use_default_kwarg=self.use_default_kwarg,
+            has_default=True,
+        )
+
+    def _get_default(  # noqa: PLR6301
+        self,
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
+        final_data_type: DataType,
+        required: bool,  # noqa: FBT001
+    ) -> Any:
+        if isinstance(field, graphql.GraphQLInputField):  # pragma: no cover
+            if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
+                return None
+            return field.default_value
+        if required is False and final_data_type.is_list:
+            return None
+
+        return None
+
+    def parse_scalar(self, scalar_graphql_object: graphql.GraphQLScalarType) -> None:
+        self.results.append(
+            self.data_model_scalar_type(
+                reference=self.references[scalar_graphql_object.name],
+                fields=[],
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                description=scalar_graphql_object.description,
+            )
+        )
+
+    def parse_enum(self, enum_object: graphql.GraphQLEnumType) -> None:
+        enum_fields: list[DataModelFieldBase] = []
+        exclude_field_names: set[str] = set()
+
+        for value_name, value in enum_object.values.items():
+            default = f"'{value_name.translate(escape_characters)}'" if isinstance(value_name, str) else value_name
+
+            field_name = self.model_resolver.get_valid_field_name(
+                value_name, excludes=exclude_field_names, model_type=ModelType.ENUM
+            )
+            exclude_field_names.add(field_name)
+
+            enum_fields.append(
+                self.data_model_field_type(
+                    name=field_name,
+                    data_type=self.data_type_manager.get_data_type(
+                        Types.string,
+                    ),
+                    default=default,
+                    required=True,
+                    strip_default_none=self.strip_default_none,
+                    has_default=True,
+                    use_field_description=value.description is not None,
+                    original_name=None,
+                )
+            )
+
+        enum = Enum(
+            reference=self.references[enum_object.name],
+            fields=enum_fields,
+            path=self.current_source_path,
+            description=enum_object.description,
+            custom_template_dir=self.custom_template_dir,
+        )
+        self.results.append(enum)
+
+    def parse_field(
+        self,
+        field_name: str,
+        alias: str | None,
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
+    ) -> DataModelFieldBase:
+        final_data_type = DataType(
+            is_optional=True,
+            use_union_operator=self.use_union_operator,
+            use_standard_collections=self.use_standard_collections,
+        )
+        data_type = final_data_type
+        obj = field.type
+
+        while graphql.is_list_type(obj) or graphql.is_non_null_type(obj):
+            if graphql.is_list_type(obj):
+                data_type.is_list = True
+
+                new_data_type = DataType(
+                    is_optional=True,
+                    use_union_operator=self.use_union_operator,
+                    use_standard_collections=self.use_standard_collections,
+                )
+                data_type.data_types = [new_data_type]
+
+                data_type = new_data_type
+            elif graphql.is_non_null_type(obj):  # pragma: no cover
+                data_type.is_optional = False
+
+            obj = graphql.assert_wrapping_type(obj)
+            obj = obj.of_type
+
+        if graphql.is_enum_type(obj):
+            obj = graphql.assert_enum_type(obj)
+            data_type.reference = self.references[obj.name]
+
+        obj = graphql.assert_named_type(obj)
+        data_type.type = obj.name
+
+        required = (not self.force_optional_for_required_fields) and (not final_data_type.is_optional)
+
+        default = self._get_default(field, final_data_type, required)
+        extras = {} if self.default_field_extras is None else self.default_field_extras.copy()
+
+        if field.description is not None:  # pragma: no cover
+            extras["description"] = field.description
+
+        return self.data_model_field_type(
+            name=field_name,
+            default=default,
+            data_type=final_data_type,
+            required=required,
+            extras=extras,
+            alias=alias,
+            strip_default_none=self.strip_default_none,
+            use_annotated=self.use_annotated,
+            use_field_description=self.use_field_description,
+            use_default_kwarg=self.use_default_kwarg,
+            original_name=field_name,
+            has_default=default is not None,
+        )
+
+    def parse_object_like(
+        self,
+        obj: graphql.GraphQLInterfaceType | graphql.GraphQLObjectType | graphql.GraphQLInputObjectType,
+    ) -> None:
+        fields = []
+        exclude_field_names: set[str] = set()
+
+        for field_name, field in obj.fields.items():
+            field_name_, alias = self.model_resolver.get_valid_field_name_and_alias(
+                field_name, excludes=exclude_field_names
+            )
+            exclude_field_names.add(field_name_)
+
+            data_model_field_type = self.parse_field(field_name_, alias, field)
+            fields.append(data_model_field_type)
+
+        fields.append(self._typename_field(obj.name))
+
+        base_classes = []
+        if hasattr(obj, "interfaces"):  # pragma: no cover
+            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore[reportAttributeAccessIssue]
+
+        data_model_type = self._create_data_model(
+            reference=self.references[obj.name],
+            fields=fields,
+            base_classes=base_classes,
+            custom_base_class=self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description,
+            keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_type)
+
+    def parse_interface(self, interface_graphql_object: graphql.GraphQLInterfaceType) -> None:
+        self.parse_object_like(interface_graphql_object)
+
+    def parse_object(self, graphql_object: graphql.GraphQLObjectType) -> None:
+        self.parse_object_like(graphql_object)
+
+    def parse_input_object(self, input_graphql_object: graphql.GraphQLInputObjectType) -> None:
+        self.parse_object_like(input_graphql_object)  # pragma: no cover
+
+    def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
+        fields = [self.data_model_field_type(name=type_.name, data_type=DataType()) for type_ in union_object.types]
+
+        data_model_type = self.data_model_union_type(
+            reference=self.references[union_object.name],
+            fields=fields,
+            custom_base_class=self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=union_object.description,
+        )
+        self.results.append(data_model_type)
+
+    def parse_raw(self) -> None:
+        self.all_graphql_objects = {}
+        self.references: dict[str, Reference] = {}
+
+        self.support_graphql_types = {
+            graphql.type.introspection.TypeKind.SCALAR: [],
+            graphql.type.introspection.TypeKind.ENUM: [],
+            graphql.type.introspection.TypeKind.UNION: [],
+            graphql.type.introspection.TypeKind.INTERFACE: [],
+            graphql.type.introspection.TypeKind.OBJECT: [],
+            graphql.type.introspection.TypeKind.INPUT_OBJECT: [],
+        }
+
+        # may become a parameter in the future (??)
+        mapper_from_graphql_type_to_parser_method = {
+            graphql.type.introspection.TypeKind.SCALAR: self.parse_scalar,
+            graphql.type.introspection.TypeKind.ENUM: self.parse_enum,
+            graphql.type.introspection.TypeKind.INTERFACE: self.parse_interface,
+            graphql.type.introspection.TypeKind.OBJECT: self.parse_object,
+            graphql.type.introspection.TypeKind.INPUT_OBJECT: self.parse_input_object,
+            graphql.type.introspection.TypeKind.UNION: self.parse_union,
+        }
+
+        for source, path_parts in self._get_context_source_path_parts():
+            schema: graphql.GraphQLSchema = build_graphql_schema(source.text)
+            self.raw_obj = schema
+
+            self._resolve_types(path_parts, schema)
+
+            for next_type in self.parse_order:
+                for obj in self.support_graphql_types[next_type]:
+                    parser_ = mapper_from_graphql_type_to_parser_method[next_type]
+                    parser_(obj)
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/jsonschema.py 0.34.0-1/src/datamodel_code_generator/parser/jsonschema.py
--- 0.26.4-3/src/datamodel_code_generator/parser/jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/parser/jsonschema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,1749 @@
+from __future__ import annotations
+
+import enum as _enum
+from collections import defaultdict
+from contextlib import contextmanager
+from functools import cached_property, lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union
+from urllib.parse import ParseResult, unquote
+from warnings import warn
+
+from pydantic import (
+    Field,
+)
+
+from datamodel_code_generator import (
+    InvalidClassNameError,
+    load_yaml,
+    load_yaml_from_path,
+    snooper_to_methods,
+)
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, Formatter, PythonVersion, PythonVersionMin
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model.base import UNDEFINED, get_module_name
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.model.enum import Enum
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.parser.base import (
+    SPECIAL_PATH_FORMAT,
+    Parser,
+    Source,
+    escape_characters,
+    get_special_path,
+    title_to_class_name,
+)
+from datamodel_code_generator.reference import ModelType, Reference, is_url
+from datamodel_code_generator.types import (
+    DataType,
+    DataTypeManager,
+    EmptyDataType,
+    StrictTypes,
+    Types,
+    UnionIntFloat,
+)
+from datamodel_code_generator.util import (
+    PYDANTIC_V2,
+    BaseModel,
+    field_validator,
+    model_validator,
+)
+
+if PYDANTIC_V2:
+    from pydantic import ConfigDict
+
+from datamodel_code_generator.format import DatetimeClassType
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
+
+
+def unescape_json_pointer_segment(segment: str) -> str:
+    # Unescape ~1, ~0, and percent-encoding
+    return unquote(segment.replace("~1", "/").replace("~0", "~"))
+
+
+def get_model_by_path(schema: dict[str, Any] | list[Any], keys: list[str] | list[int]) -> dict[Any, Any]:
+    model: dict[Any, Any] | list[Any]
+    if not keys:
+        model = schema
+    else:
+        # Unescape the key if it's a string (JSON pointer segment)
+        key = keys[0]
+        if isinstance(key, str):
+            key = unescape_json_pointer_segment(key)
+        if len(keys) == 1:
+            model = schema.get(str(key), {}) if isinstance(schema, dict) else schema[int(key)]
+        elif isinstance(schema, dict):
+            model = get_model_by_path(schema[str(key)], keys[1:])
+        else:
+            model = get_model_by_path(schema[int(key)], keys[1:])
+    if isinstance(model, dict):
+        return model
+    msg = f"Does not support json pointer to array. schema={schema}, key={keys}"
+    raise NotImplementedError(  # pragma: no cover
+        msg
+    )
+
+
+# Mapping of JSON Schema "type" -> "format" -> generated Types member.
+# The "default" entry is used when no format is given; _get_type (below)
+# falls back to it, with a warning, for unrecognized formats.
+json_schema_data_formats: dict[str, dict[str, Types]] = {
+    "integer": {
+        "int32": Types.int32,
+        "int64": Types.int64,
+        "default": Types.integer,
+        "date-time": Types.date_time,
+        "unix-time": Types.int64,
+    },
+    "number": {
+        "float": Types.float,
+        "double": Types.double,
+        "decimal": Types.decimal,
+        "date-time": Types.date_time,
+        "time": Types.time,
+        "default": Types.number,
+    },
+    "string": {
+        "default": Types.string,
+        "byte": Types.byte,  # base64 encoded string
+        "binary": Types.binary,
+        "date": Types.date,
+        "date-time": Types.date_time,
+        "duration": Types.timedelta,
+        "time": Types.time,
+        "password": Types.password,
+        "path": Types.path,
+        "email": Types.email,
+        "idn-email": Types.email,
+        "uuid": Types.uuid,
+        "uuid1": Types.uuid1,
+        "uuid2": Types.uuid2,
+        "uuid3": Types.uuid3,
+        "uuid4": Types.uuid4,
+        "uuid5": Types.uuid5,
+        "uri": Types.uri,
+        "uri-reference": Types.string,
+        "hostname": Types.hostname,
+        "ipv4": Types.ipv4,
+        "ipv4-network": Types.ipv4_network,
+        "ipv6": Types.ipv6,
+        "ipv6-network": Types.ipv6_network,
+        "decimal": Types.decimal,
+        "integer": Types.integer,
+    },
+    "boolean": {"default": Types.boolean},
+    "object": {"default": Types.object},
+    "null": {"default": Types.null},
+    "array": {"default": Types.array},
+}
+
+
+class JSONReference(_enum.Enum):
+    """Where a ``$ref`` target points: same document, another file, or a URL."""
+    LOCAL = "LOCAL"
+    REMOTE = "REMOTE"
+    URL = "URL"
+
+
+class Discriminator(BaseModel):
+    """Discriminator object: the selecting property plus an optional value-to-schema mapping."""
+    propertyName: str  # noqa: N815
+    mapping: Optional[dict[str, str]] = None  # noqa: UP045
+
+
+class JsonSchemaObject(BaseModel):
+    """Typed view of one JSON Schema object; unmodeled keywords are kept in ``extras``."""
+
+    if not TYPE_CHECKING:
+        if PYDANTIC_V2:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                return cls.model_fields
+
+        else:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                return cls.__fields__
+
+            @classmethod
+            def model_rebuild(cls) -> None:
+                cls.update_forward_refs()
+
+    # Keyword names that count as value constraints (see has_constraint below).
+    __constraint_fields__: set[str] = {  # noqa: RUF012
+        "exclusiveMinimum",
+        "minimum",
+        "exclusiveMaximum",
+        "maximum",
+        "multipleOf",
+        "minItems",
+        "maxItems",
+        "minLength",
+        "maxLength",
+        "pattern",
+        "uniqueItems",
+    }
+    __extra_key__: str = SPECIAL_PATH_FORMAT.format("extras")
+
+    # Draft-4 style: exclusiveMaximum/exclusiveMinimum may be booleans that
+    # qualify maximum/minimum; normalize them to plain numeric bounds.
+    @model_validator(mode="before")
+    def validate_exclusive_maximum_and_exclusive_minimum(cls, values: Any) -> Any:  # noqa: N805
+        if not isinstance(values, dict):
+            return values
+        exclusive_maximum: float | bool | None = values.get("exclusiveMaximum")
+        exclusive_minimum: float | bool | None = values.get("exclusiveMinimum")
+
+        if exclusive_maximum is True:
+            values["exclusiveMaximum"] = values["maximum"]
+            del values["maximum"]
+        elif exclusive_maximum is False:
+            del values["exclusiveMaximum"]
+        if exclusive_minimum is True:
+            values["exclusiveMinimum"] = values["minimum"]
+            del values["minimum"]
+        elif exclusive_minimum is False:
+            del values["exclusiveMinimum"]
+        return values
+
+    # Normalize "$ref" fragments: "x#y" -> "x#/y"; a bare trailing "#/" is trimmed.
+    @field_validator("ref")
+    def validate_ref(cls, value: Any) -> Any:  # noqa: N805
+        if isinstance(value, str) and "#" in value:
+            if value.endswith("#/"):
+                return value[:-1]
+            if "#/" in value or value[0] == "#" or value[-1] == "#":
+                return value
+            return value.replace("#", "#/")
+        return value
+
+    # "required" sometimes contains non-string entries (dicts with combinators).
+    @field_validator("required", mode="before")
+    def validate_required(cls, value: Any) -> Any:  # noqa: N805
+        if value is None:
+            return []
+        if isinstance(value, list):  # noqa: PLR1702
+            # Filter to only include valid strings, excluding invalid objects
+            required_fields: list[str] = []
+            for item in value:
+                if isinstance(item, str):
+                    required_fields.append(item)
+
+                # In some cases, the required field can include "anyOf", "oneOf", or "allOf" as a dict (#2297)
+                elif isinstance(item, dict):
+                    for key, val in item.items():
+                        if isinstance(val, list):
+                            # If 'anyOf' or "oneOf" is present, we won't include it in required fields
+                            if key in {"anyOf", "oneOf"}:
+                                continue
+
+                            if key == "allOf":
+                                # If 'allOf' is present, we include them as required fields
+                                required_fields.extend(sub_item for sub_item in val if isinstance(sub_item, str))
+
+            value = required_fields
+
+        return value
+
+    # --- JSON Schema keywords; aliased names keep their original JSON casing ---
+    items: Optional[Union[list[JsonSchemaObject], JsonSchemaObject, bool]] = None  # noqa: UP007, UP045
+    uniqueItems: Optional[bool] = None  # noqa: N815, UP045
+    type: Optional[Union[str, list[str]]] = None  # noqa: UP007, UP045
+    format: Optional[str] = None  # noqa: UP045
+    pattern: Optional[str] = None  # noqa: UP045
+    minLength: Optional[int] = None  # noqa:  N815,UP045
+    maxLength: Optional[int] = None  # noqa:  N815,UP045
+    minimum: Optional[UnionIntFloat] = None  # noqa:  UP045
+    maximum: Optional[UnionIntFloat] = None  # noqa:  UP045
+    minItems: Optional[int] = None  # noqa:  N815,UP045
+    maxItems: Optional[int] = None  # noqa:  N815,UP045
+    multipleOf: Optional[float] = None  # noqa: N815, UP045
+    exclusiveMaximum: Optional[Union[float, bool]] = None  # noqa: N815, UP007, UP045
+    exclusiveMinimum: Optional[Union[float, bool]] = None  # noqa: N815, UP007, UP045
+    additionalProperties: Optional[Union[JsonSchemaObject, bool]] = None  # noqa: N815, UP007, UP045
+    patternProperties: Optional[dict[str, JsonSchemaObject]] = None  # noqa: N815, UP045
+    oneOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    anyOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    allOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    enum: list[Any] = []  # noqa: RUF012
+    writeOnly: Optional[bool] = None  # noqa: N815, UP045
+    readOnly: Optional[bool] = None  # noqa: N815, UP045
+    properties: Optional[dict[str, Union[JsonSchemaObject, bool]]] = None  # noqa: UP007, UP045
+    required: list[str] = []  # noqa: RUF012
+    ref: Optional[str] = Field(default=None, alias="$ref")  # noqa: UP045
+    nullable: Optional[bool] = False  # noqa: UP045
+    x_enum_varnames: list[str] = Field(default=[], alias="x-enum-varnames")
+    description: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    example: Any = None
+    examples: Any = None
+    default: Any = None
+    id: Optional[str] = Field(default=None, alias="$id")  # noqa: UP045
+    custom_type_path: Optional[str] = Field(default=None, alias="customTypePath")  # noqa: UP045
+    custom_base_path: Optional[str] = Field(default=None, alias="customBasePath")  # noqa: UP045
+    extras: dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
+    discriminator: Optional[Union[Discriminator, str]] = None  # noqa: UP007, UP045
+    if PYDANTIC_V2:
+        model_config = ConfigDict(  # pyright: ignore[reportPossiblyUnboundVariable]
+            arbitrary_types_allowed=True,
+            ignored_types=(cached_property,),
+        )
+    else:
+
+        class Config:
+            arbitrary_types_allowed = True
+            keep_untouched = (cached_property,)
+            smart_casts = True
+
+    if not TYPE_CHECKING:
+
+        # Stash raw keys not excluded above into ``extras`` (plus a nested "const").
+        def __init__(self, **data: Any) -> None:
+            super().__init__(**data)
+            self.extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
+            if "const" in data.get(self.__extra_key__, {}):
+                self.extras["const"] = data[self.__extra_key__]["const"]
+
+    @cached_property
+    def is_object(self) -> bool:
+        # Explicit properties, or a bare "object" type with no combinators or $ref.
+        return self.properties is not None or (
+            self.type == "object" and not self.allOf and not self.oneOf and not self.anyOf and not self.ref
+        )
+
+    @cached_property
+    def is_array(self) -> bool:
+        return self.items is not None or self.type == "array"
+
+    @cached_property
+    def ref_object_name(self) -> str:  # pragma: no cover
+        # Last path segment of the $ref, e.g. "#/defs/Pet" -> "Pet".
+        return (self.ref or "").rsplit("/", 1)[-1]
+
+    @field_validator("items", mode="before")
+    def validate_items(cls, values: Any) -> Any:  # noqa: N805
+        # this condition expects empty dict
+        return values or None
+
+    @cached_property
+    def has_default(self) -> bool:
+        return "default" in self.__fields_set__ or "default_factory" in self.extras
+
+    @cached_property
+    def has_constraint(self) -> bool:
+        # True when any explicitly-set field is a constraint keyword.
+        return bool(self.__constraint_fields__ & self.__fields_set__)
+
+    @cached_property
+    def ref_type(self) -> JSONReference | None:
+        if self.ref:
+            return get_ref_type(self.ref)
+        return None  # pragma: no cover
+
+    @cached_property
+    def type_has_null(self) -> bool:
+        # True when "type" is a list containing "null" (nullable union form).
+        return isinstance(self.type, list) and "null" in self.type
+
+
+@lru_cache
+def get_ref_type(ref: str) -> JSONReference:
+    if ref[0] == "#":
+        return JSONReference.LOCAL
+    if is_url(ref):
+        return JSONReference.URL
+    return JSONReference.REMOTE
+
+
+def _get_type(type_: str, format__: str | None = None) -> Types:
+    if type_ not in json_schema_data_formats:
+        return Types.any
+    data_formats: Types | None = json_schema_data_formats[type_].get("default" if format__ is None else format__)
+    if data_formats is not None:
+        return data_formats
+
+    warn(f"format of {format__!r} not understood for {type_!r} - using default", stacklevel=2)
+    return json_schema_data_formats[type_]["default"]
+
+
+# Resolve the self-referential JsonSchemaObject forward references.
+JsonSchemaObject.model_rebuild()
+
+# Keys that are always eligible to be copied into a field's extras.
+DEFAULT_FIELD_KEYS: set[str] = {
+    "example",
+    "examples",
+    "description",
+    "discriminator",
+    "title",
+    "const",
+    "default_factory",
+}
+
+# readOnly/writeOnly are deliberately kept out of field extras.
+EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: set[str] = {
+    "readOnly",
+    "writeOnly",
+}
+
+# Every modeled keyword except the doc-related ones above; used by
+# JsonSchemaObject.__init__ to decide which raw keys land in ``extras``.
+EXCLUDE_FIELD_KEYS = (
+    set(JsonSchemaObject.get_fields())  # pyright: ignore[reportAttributeAccessIssue]
+    - DEFAULT_FIELD_KEYS
+    - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
+) | {
+    "$id",
+    "$ref",
+    JsonSchemaObject.__extra_key__,
+}
+
+
+@snooper_to_methods()  # noqa: PLR0904
+class JsonSchemaParser(Parser):
+    """Parser that turns JSON Schema documents into generated data models."""
+
+    # Pointer roots that hold reusable definitions; subclasses may override.
+    SCHEMA_PATHS: ClassVar[list[str]] = ["#/definitions", "#/$defs"]
+    SCHEMA_OBJECT_TYPE: ClassVar[type[JsonSchemaObject]] = JsonSchemaObject
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | list[Path] | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        apply_default_values_for_required_fields: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        use_one_literal_as_default: bool = False,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType | None = None,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+    ) -> None:
+        """Configure the JSON Schema parser.
+
+        Nearly every keyword argument is forwarded verbatim to ``Parser.__init__``;
+        only ``target_datetime_class`` gets a JSON-Schema-specific default
+        (``Awaredatetime``) before being forwarded.
+        """
+        target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+        )
+
+        # Cache of already-loaded remote documents; populated elsewhere in the parser.
+        self.remote_object_cache: DefaultPutDict[str, dict[str, Any]] = DefaultPutDict()
+        self.raw_obj: dict[Any, Any] = {}
+        self._root_id: Optional[str] = None  # noqa: UP045
+        self._root_id_base_path: Optional[str] = None  # noqa: UP045
+        self.reserved_refs: defaultdict[tuple[str, ...], set[str]] = defaultdict(set)
+        # Keys whose values are copied into each field's extras (see get_field_extras).
+        self.field_keys: set[str] = {
+            *DEFAULT_FIELD_KEYS,
+            *self.field_extra_keys,
+            *self.field_extra_keys_without_x_prefix,
+        }
+
+        if self.data_model_field_type.can_have_extra_keys:
+            # Extra keys become real field keys, so sanitize them via the resolver.
+            self.get_field_extra_key: Callable[[str], str] = (
+                lambda key: self.model_resolver.get_valid_field_name_and_alias(key)[0]
+            )
+
+        else:
+            self.get_field_extra_key = lambda key: key
+
+    def get_field_extras(self, obj: JsonSchemaObject) -> dict[str, Any]:
+        if self.field_include_all_keys:
+            extras = {
+                self.get_field_extra_key(k.lstrip("x-") if k in self.field_extra_keys_without_x_prefix else k): v
+                for k, v in obj.extras.items()
+            }
+        else:
+            extras = {
+                self.get_field_extra_key(k.lstrip("x-") if k in self.field_extra_keys_without_x_prefix else k): v
+                for k, v in obj.extras.items()
+                if k in self.field_keys
+            }
+        if self.default_field_extras:
+            extras.update(self.default_field_extras)
+        return extras
+
+    @cached_property
+    def schema_paths(self) -> list[tuple[str, list[str]]]:
+        return [(s, s.lstrip("#/").split("/")) for s in self.SCHEMA_PATHS]
+
+    @property
+    def root_id(self) -> str | None:
+        # The model resolver owns the root "$id" state; this just delegates.
+        return self.model_resolver.root_id
+
+    @root_id.setter
+    def root_id(self, value: str | None) -> None:
+        self.model_resolver.set_root_id(value)
+
+    def should_parse_enum_as_literal(self, obj: JsonSchemaObject) -> bool:
+        return self.enum_field_as_literal == LiteralType.All or (
+            self.enum_field_as_literal == LiteralType.One and len(obj.enum) == 1
+        )
+
+    def is_constraints_field(self, obj: JsonSchemaObject) -> bool:
+        return obj.is_array or (
+            self.field_constraints and not (obj.ref or obj.anyOf or obj.oneOf or obj.allOf or obj.is_object or obj.enum)
+        )
+
+    def get_object_field(  # noqa: PLR0913
+        self,
+        *,
+        field_name: str | None,
+        field: JsonSchemaObject,
+        required: bool,
+        field_type: DataType,
+        alias: str | None,
+        original_field_name: str | None,
+    ) -> DataModelFieldBase:
+        return self.data_model_field_type(
+            name=field_name,
+            default=field.default,
+            data_type=field_type,
+            required=required,
+            alias=alias,
+            constraints=field.dict() if self.is_constraints_field(field) else None,
+            nullable=field.nullable if self.strict_nullable and (field.has_default or required) else None,
+            strip_default_none=self.strip_default_none,
+            extras=self.get_field_extras(field),
+            use_annotated=self.use_annotated,
+            use_field_description=self.use_field_description,
+            use_default_kwarg=self.use_default_kwarg,
+            original_name=original_field_name,
+            has_default=field.has_default,
+            type_has_null=field.type_has_null,
+        )
+
+    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
+        if obj.type is None:
+            if "const" in obj.extras:
+                return self.data_type_manager.get_data_type_from_value(obj.extras["const"])
+            return self.data_type_manager.get_data_type(
+                Types.any,
+            )
+
+        def _get_data_type(type_: str, format__: str) -> DataType:
+            return self.data_type_manager.get_data_type(
+                _get_type(type_, format__),
+                **obj.dict() if not self.field_constraints else {},
+            )
+
+        if isinstance(obj.type, list):
+            return self.data_type(
+                data_types=[_get_data_type(t, obj.format or "default") for t in obj.type if t != "null"],
+                is_optional="null" in obj.type,
+            )
+        return _get_data_type(obj.type, obj.format or "default")
+
+    def get_ref_data_type(self, ref: str) -> DataType:
+        reference = self.model_resolver.add_ref(ref)
+        return self.data_type(reference=reference)
+
+    def set_additional_properties(self, path: str, obj: JsonSchemaObject) -> None:
+        if isinstance(obj.additionalProperties, bool):
+            self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties
+
+    def set_title(self, path: str, obj: JsonSchemaObject) -> None:
+        if obj.title:
+            self.extra_template_data[path]["title"] = obj.title
+
+    def _deep_merge(self, dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
+        result = dict1.copy()
+        for key, value in dict2.items():
+            if key in result:
+                if isinstance(result[key], dict) and isinstance(value, dict):
+                    result[key] = self._deep_merge(result[key], value)
+                    continue
+                if isinstance(result[key], list) and isinstance(value, list):
+                    result[key] = result[key] + value  # noqa: PLR6104
+                    continue
+            result[key] = value
+        return result
+
+    def parse_combined_schema(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        target_attribute_name: str,
+    ) -> list[DataType]:
+        """Parse an ``anyOf``/``oneOf`` list into one DataType per branch.
+
+        Non-ref branches are deep-merged with the keywords declared next to the
+        combinator; plain ``$ref`` branches are recorded and later replaced by a
+        model that inherits the referenced one (when the parsed item carries a
+        reference).
+        """
+        # Sibling keywords of the combinator, to be merged into each branch.
+        base_object = obj.dict(exclude={target_attribute_name}, exclude_unset=True, by_alias=True)
+        combined_schemas: list[JsonSchemaObject] = []
+        refs = []
+        for index, target_attribute in enumerate(getattr(obj, target_attribute_name, [])):
+            if target_attribute.ref:
+                combined_schemas.append(target_attribute)
+                refs.append(index)
+                # TODO: support partial ref
+            else:
+                combined_schemas.append(
+                    self.SCHEMA_OBJECT_TYPE.parse_obj(
+                        self._deep_merge(
+                            base_object,
+                            target_attribute.dict(exclude_unset=True, by_alias=True),
+                        )
+                    )
+                )
+
+        parsed_schemas = self.parse_list_item(
+            name,
+            combined_schemas,
+            path,
+            obj,
+            singular_name=False,
+        )
+        common_path_keyword = f"{target_attribute_name}Common"
+        # For $ref branches, emit an empty model inheriting the referenced one
+        # instead of the reference itself.
+        return [
+            self._parse_object_common_part(
+                name,
+                obj,
+                [*get_special_path(common_path_keyword, path), str(i)],
+                ignore_duplicate_model=True,
+                fields=[],
+                base_classes=[d.reference],
+                required=[],
+            )
+            if i in refs and d.reference
+            else d
+            for i, d in enumerate(parsed_schemas)
+        ]
+
+    def parse_any_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+        """Parse the ``anyOf`` branches of ``obj`` via ``parse_combined_schema``."""
+        return self.parse_combined_schema(name, obj, path, "anyOf")
+
+    def parse_one_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+        """Parse the ``oneOf`` branches of ``obj`` via ``parse_combined_schema``."""
+        return self.parse_combined_schema(name, obj, path, "oneOf")
+
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with conditional frozen parameter for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            kwargs["frozen"] = self.frozen_dataclasses
+        return data_model_class(**kwargs)
+
+    def _parse_object_common_part(  # noqa: PLR0913, PLR0917
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        ignore_duplicate_model: bool,  # noqa: FBT001
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference],
+        required: list[str],
+    ) -> DataType:
+        """Materialize one object model from already-collected fields and bases.
+
+        Parses ``obj.properties`` into additional fields, applies ``required``
+        markers (both the caller's list and ``obj.required``), registers the
+        model, and returns a DataType referencing it. With
+        ``ignore_duplicate_model`` an empty single-base model collapses to a
+        plain reference to that base.
+        """
+        if obj.properties:
+            fields.extend(
+                self.parse_object_fields(
+                    obj, path, get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+                )
+            )
+        # ignore an undetected object
+        if ignore_duplicate_model and not fields and len(base_classes) == 1:
+            with self.model_resolver.current_base_path_context(self.model_resolver._base_path):  # noqa: SLF001
+                self.model_resolver.delete(path)
+                return self.data_type(reference=base_classes[0])
+        if required:
+            for field in fields:
+                if self.force_optional_for_required_fields or (  # pragma: no cover
+                    self.apply_default_values_for_required_fields and field.has_default
+                ):
+                    continue  # pragma: no cover
+                if (field.original_name or field.name) in required:
+                    field.required = True
+        if obj.required:
+            field_name_to_field = {f.original_name or f.name: f for f in fields}
+            for required_ in obj.required:
+                if required_ in field_name_to_field:
+                    field = field_name_to_field[required_]
+                    if self.force_optional_for_required_fields or (
+                        self.apply_default_values_for_required_fields and field.has_default
+                    ):
+                        continue
+                    field.required = True
+                else:
+                    # Names required but never declared become placeholder fields.
+                    fields.append(
+                        self.data_model_field_type(required=True, original_name=required_, data_type=DataType())
+                    )
+        if self.use_title_as_name and obj.title:  # pragma: no cover
+            name = obj.title
+        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+        self.set_additional_properties(reference.path, obj)
+
+        data_model_type = self._create_data_model(
+            reference=reference,
+            fields=fields,
+            base_classes=base_classes,
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_type)
+
+        return self.data_type(reference=reference)
+
+    def _parse_all_of_item(  # noqa: PLR0913, PLR0917
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference],
+        required: list[str],
+        union_models: list[Reference],
+    ) -> None:
+        """Recursively flatten ``obj.allOf`` into the supplied accumulator lists.
+
+        ``$ref`` members become base classes; inline members contribute their
+        parsed fields (or, when field-less, their required names) and are then
+        flattened themselves. Nested ``anyOf``/``oneOf`` members are parsed and
+        their references collected in ``union_models``. All four list arguments
+        are mutated in place; nothing is returned.
+        """
+        for all_of_item in obj.allOf:
+            if all_of_item.ref:  # $ref
+                base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
+            else:
+                module_name = get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+                object_fields = self.parse_object_fields(
+                    all_of_item,
+                    path,
+                    module_name,
+                )
+
+                if object_fields:
+                    fields.extend(object_fields)
+                elif all_of_item.required:
+                    # No fields parsed here: carry the required names upward so
+                    # the caller can still mark inherited fields as required.
+                    required.extend(all_of_item.required)
+                self._parse_all_of_item(
+                    name,
+                    all_of_item,
+                    path,
+                    fields,
+                    base_classes,
+                    required,
+                    union_models,
+                )
+                if all_of_item.anyOf:
+                    self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    union_models.extend(d.reference for d in self.parse_any_of(name, all_of_item, path) if d.reference)
+                if all_of_item.oneOf:
+                    self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    union_models.extend(d.reference for d in self.parse_one_of(name, all_of_item, path) if d.reference)
+
+    def parse_all_of(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        ignore_duplicate_model: bool = False,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse an ``allOf`` schema into a data model and return its DataType.
+
+        A lone local ``$ref`` pointing at an enum is returned as a plain
+        reference. When no nested ``anyOf``/``oneOf`` members exist, a single
+        combined model is produced; otherwise one model per union member is
+        generated (each inheriting both the member and the combined allOf
+        model) and the union is wrapped in a root model.
+        """
+        if len(obj.allOf) == 1 and not obj.properties:
+            single_obj = obj.allOf[0]
+            if (
+                single_obj.ref
+                and single_obj.ref_type == JSONReference.LOCAL
+                and get_model_by_path(self.raw_obj, single_obj.ref[2:].split("/")).get("enum")
+            ):
+                return self.get_ref_data_type(single_obj.ref)
+        fields: list[DataModelFieldBase] = []
+        base_classes: list[Reference] = []
+        required: list[str] = []
+        union_models: list[Reference] = []
+        self._parse_all_of_item(name, obj, path, fields, base_classes, required, union_models)
+        if not union_models:
+            return self._parse_object_common_part(
+                name, obj, path, ignore_duplicate_model, fields, base_classes, required
+            )
+        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+        # Combined model holding the plain allOf parts; every union member
+        # model below inherits from it alongside its own reference.
+        all_of_data_type = self._parse_object_common_part(
+            name,
+            obj,
+            get_special_path("allOf", path),
+            ignore_duplicate_model,
+            fields,
+            base_classes,
+            required,
+        )
+        assert all_of_data_type.reference is not None
+        data_type = self.data_type(
+            data_types=[
+                self._parse_object_common_part(
+                    name,
+                    obj,
+                    get_special_path(f"union_model-{index}", path),
+                    ignore_duplicate_model,
+                    [],
+                    [union_model, all_of_data_type.reference],
+                    [],
+                )
+                for index, union_model in enumerate(union_models)
+            ]
+        )
+        field = self.get_object_field(
+            field_name=None,
+            field=obj,
+            required=True,
+            field_type=data_type,
+            alias=None,
+            original_field_name=None,
+        )
+        data_model_root = self.data_model_root_type(
+            reference=reference,
+            fields=[field],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root)
+        return self.data_type(reference=reference)
+
+    def parse_object_fields(
+        self,
+        obj: JsonSchemaObject,
+        path: list[str],
+        module_name: Optional[str] = None,  # noqa: UP045
+    ) -> list[DataModelFieldBase]:
+        """Convert ``obj.properties`` into a list of data model fields.
+
+        Property names are sanitized through the model resolver (an alias is
+        kept when the name had to change) and deduplicated via
+        ``exclude_field_names``. Required-ness follows ``obj.required`` unless
+        configuration forces optionals or lets a default stand in.
+        """
+        properties: dict[str, JsonSchemaObject | bool] = {} if obj.properties is None else obj.properties
+        requires: set[str] = {*()} if obj.required is None else {*obj.required}
+        fields: list[DataModelFieldBase] = []
+
+        exclude_field_names: set[str] = set()
+        for original_field_name, field in properties.items():
+            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                original_field_name, excludes=exclude_field_names
+            )
+            modular_name = f"{module_name}.{field_name}" if module_name else field_name
+
+            exclude_field_names.add(field_name)
+
+            # A boolean property schema carries no type information: map it to Any.
+            if isinstance(field, bool):
+                fields.append(
+                    self.data_model_field_type(
+                        name=field_name,
+                        data_type=self.data_type_manager.get_data_type(
+                            Types.any,
+                        ),
+                        required=False if self.force_optional_for_required_fields else original_field_name in requires,
+                        alias=alias,
+                        strip_default_none=self.strip_default_none,
+                        use_annotated=self.use_annotated,
+                        use_field_description=self.use_field_description,
+                        original_name=original_field_name,
+                    )
+                )
+                continue
+
+            field_type = self.parse_item(modular_name, field, [*path, field_name])
+
+            if self.force_optional_for_required_fields or (
+                self.apply_default_values_for_required_fields and field.has_default
+            ):
+                required: bool = False
+            else:
+                required = original_field_name in requires
+            fields.append(
+                self.get_object_field(
+                    field_name=field_name,
+                    field=field,
+                    required=required,
+                    field_type=field_type,
+                    alias=alias,
+                    original_field_name=original_field_name,
+                )
+            )
+        return fields
+
+    def parse_object(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        unique: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse an object schema into a data model and return its DataType.
+
+        Registers the model name, parses its properties, and appends the model
+        to ``self.results``. An object with no parseable fields but a schema in
+        ``additionalProperties`` becomes a root model wrapping a dict type.
+        The ``unique`` argument is deprecated and ignored.
+        """
+        if not unique:  # pragma: no cover
+            warn(
+                f"{self.__class__.__name__}.parse_object() ignore `unique` argument."
+                f"An object name must be unique."
+                f"This argument will be removed in a future version",
+                stacklevel=2,
+            )
+        if self.use_title_as_name and obj.title:
+            name = obj.title
+        reference = self.model_resolver.add(
+            path,
+            name,
+            class_name=True,
+            singular_name=singular_name,
+            loaded=True,
+        )
+        class_name = reference.name
+        self.set_title(reference.path, obj)
+        fields = self.parse_object_fields(
+            obj, path, get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module)
+        )
+        if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
+            data_model_type_class = self.data_model_type
+        else:
+            # No declared properties but a schema-valued additionalProperties:
+            # model the object as a root type holding a single dict field.
+            fields.append(
+                self.get_object_field(
+                    field_name=None,
+                    field=obj.additionalProperties,
+                    required=True,
+                    original_field_name=None,
+                    field_type=self.data_type(
+                        data_types=[
+                            self.parse_item(
+                                # TODO: Improve naming for nested ClassName
+                                name,
+                                obj.additionalProperties,
+                                [*path, "additionalProperties"],
+                            )
+                        ],
+                        is_dict=True,
+                    ),
+                    alias=None,
+                )
+            )
+            data_model_type_class = self.data_model_root_type
+
+        self.set_additional_properties(reference.path, obj)
+
+        data_model_type = self._create_data_model(
+            model_type=data_model_type_class,
+            reference=reference,
+            fields=fields,
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_type)
+        return self.data_type(reference=reference)
+
+    def parse_pattern_properties(
+        self,
+        name: str,
+        pattern_properties: dict[str, JsonSchemaObject],
+        path: list[str],
+    ) -> DataType:
+        """Build a union of dict types, one per ``patternProperties`` entry.
+
+        Each entry becomes a dict whose key is a string data type carrying the
+        regex pattern (unless ``field_constraints`` is enabled, in which case
+        the pattern is omitted) and whose value type is the parsed item schema.
+        """
+        return self.data_type(
+            data_types=[
+                self.data_type(
+                    data_types=[
+                        self.parse_item(
+                            name,
+                            kv[1],
+                            get_special_path(f"patternProperties/{i}", path),
+                        )
+                    ],
+                    is_dict=True,
+                    dict_key=self.data_type_manager.get_data_type(
+                        Types.string,
+                        pattern=kv[0] if not self.field_constraints else None,
+                    ),
+                )
+                for i, kv in enumerate(pattern_properties.items())
+            ],
+        )
+
+    def parse_item(  # noqa: PLR0911, PLR0912
+        self,
+        name: str,
+        item: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        parent: JsonSchemaObject | None = None,
+    ) -> DataType:
+        """Parse one schema node into a DataType, dispatching on its shape.
+
+        Checked in order: constrained array item (wrapped in a root type),
+        ``$ref``, custom type path, array, discriminated union inside an array,
+        ``anyOf``/``oneOf``/``allOf``, object or ``patternProperties``, enum;
+        anything else falls through to a plain data type lookup.
+        """
+        if self.use_title_as_name and item.title:
+            name = item.title
+            singular_name = False
+        # A constrained (non-enum) item inside a constrained parent gets its
+        # own named root type so the constraints have a model to live on.
+        if parent and not item.enum and item.has_constraint and (parent.has_constraint or self.field_constraints):
+            root_type_path = get_special_path("array", path)
+            return self.parse_root_type(
+                self.model_resolver.add(
+                    root_type_path,
+                    name,
+                    class_name=True,
+                    singular_name=singular_name,
+                ).name,
+                item,
+                root_type_path,
+            )
+        if item.ref:
+            return self.get_ref_data_type(item.ref)
+        if item.custom_type_path:
+            return self.data_type_manager.get_data_type_from_full_path(item.custom_type_path, is_custom_type=True)
+        if item.is_array:
+            return self.parse_array_fields(name, item, get_special_path("array", path)).data_type
+        if item.discriminator and parent and parent.is_array and (item.oneOf or item.anyOf):
+            return self.parse_root_type(name, item, path)
+        if item.anyOf:
+            return self.data_type(data_types=self.parse_any_of(name, item, get_special_path("anyOf", path)))
+        if item.oneOf:
+            return self.data_type(data_types=self.parse_one_of(name, item, get_special_path("oneOf", path)))
+        if item.allOf:
+            all_of_path = get_special_path("allOf", path)
+            all_of_path = [self.model_resolver.resolve_ref(all_of_path)]
+            return self.parse_all_of(
+                self.model_resolver.add(all_of_path, name, singular_name=singular_name, class_name=True).name,
+                item,
+                all_of_path,
+                ignore_duplicate_model=True,
+            )
+        if item.is_object or item.patternProperties:
+            object_path = get_special_path("object", path)
+            if item.properties:
+                return self.parse_object(name, item, object_path, singular_name=singular_name)
+            if item.patternProperties:
+                # support only single key dict.
+                return self.parse_pattern_properties(name, item.patternProperties, object_path)
+            if isinstance(item.additionalProperties, JsonSchemaObject):
+                return self.data_type(
+                    data_types=[self.parse_item(name, item.additionalProperties, object_path)],
+                    is_dict=True,
+                )
+            # Object with no properties/patterns/typed additionalProperties.
+            return self.data_type_manager.get_data_type(
+                Types.object,
+            )
+        if item.enum:
+            if self.should_parse_enum_as_literal(item):
+                return self.parse_enum_as_literal(item)
+            return self.parse_enum(name, item, get_special_path("enum", path), singular_name=singular_name)
+        return self.get_data_type(item)
+
+    def parse_list_item(
+        self,
+        name: str,
+        target_items: list[JsonSchemaObject],
+        path: list[str],
+        parent: JsonSchemaObject,
+        singular_name: bool = True,  # noqa: FBT001, FBT002
+    ) -> list[DataType]:
+        return [
+            self.parse_item(
+                name,
+                item,
+                [*path, str(index)],
+                singular_name=singular_name,
+                parent=parent,
+            )
+            for index, item in enumerate(target_items)
+        ]
+
+    def parse_array_fields(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataModelFieldBase:
+        """Build the field (list data type plus constraints) for an array schema.
+
+        Computes required/nullable from the schema's default and ``nullable``
+        flags together with parser configuration, parses the item schemas, and
+        appends extra data types for any ``allOf``/object/enum parts of ``obj``.
+        """
+        if self.force_optional_for_required_fields:
+            required: bool = False
+            nullable: Optional[bool] = None  # noqa: UP045
+        else:
+            required = not (obj.has_default and self.apply_default_values_for_required_fields)
+            if self.strict_nullable:
+                # Strict mode: only trust the schema's nullable flag when a
+                # default exists or the field is required; otherwise nullable.
+                nullable = obj.nullable if obj.has_default or required else True
+            else:
+                # Non-strict mode folds nullability into optionality instead.
+                required = not obj.nullable and required
+                nullable = None
+        if isinstance(obj.items, JsonSchemaObject):
+            items: list[JsonSchemaObject] = [obj.items]
+        elif isinstance(obj.items, list):
+            items = obj.items
+        else:
+            items = []
+
+        data_types: list[DataType] = [
+            self.data_type(
+                data_types=self.parse_list_item(
+                    name,
+                    items,
+                    path,
+                    obj,
+                    singular_name=singular_name,
+                ),
+                is_list=True,
+            )
+        ]
+        # TODO: decide special path word for a combined data model.
+        if obj.allOf:
+            data_types.append(self.parse_all_of(name, obj, get_special_path("allOf", path)))
+        elif obj.is_object:
+            data_types.append(self.parse_object(name, obj, get_special_path("object", path)))
+        if obj.enum:
+            data_types.append(self.parse_enum(name, obj, get_special_path("enum", path)))
+        return self.data_model_field_type(
+            data_type=self.data_type(data_types=data_types),
+            default=obj.default,
+            required=required,
+            constraints=obj.dict(),
+            nullable=nullable,
+            strip_default_none=self.strip_default_none,
+            extras=self.get_field_extras(obj),
+            use_annotated=self.use_annotated,
+            use_field_description=self.use_field_description,
+            original_name=None,
+            has_default=obj.has_default,
+        )
+
+    def parse_array(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        original_name: str | None = None,
+    ) -> DataType:
+        """Parse an array schema into a named root model and return its DataType."""
+        if self.use_title_as_name and obj.title:
+            name = obj.title
+        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        field = self.parse_array_fields(original_name or name, obj, [*path, name])
+
+        if reference in [d.reference for d in field.data_type.all_data_types if d.reference]:
+            # self-reference
+            # Rebuild the field dropping the first data type (the list that
+            # points back at this model) to break the cycle.
+            # NOTE(review): data_types[1:] is used both as the list's element
+            # types and as the union tail — confirm this is intentional.
+            field = self.data_model_field_type(
+                data_type=self.data_type(
+                    data_types=[
+                        self.data_type(data_types=field.data_type.data_types[1:], is_list=True),
+                        *field.data_type.data_types[1:],
+                    ]
+                ),
+                default=field.default,
+                required=field.required,
+                constraints=field.constraints,
+                nullable=field.nullable,
+                strip_default_none=field.strip_default_none,
+                extras=field.extras,
+                use_annotated=self.use_annotated,
+                use_field_description=self.use_field_description,
+                original_name=None,
+                has_default=field.has_default,
+            )
+
+        data_model_root = self.data_model_root_type(
+            reference=reference,
+            fields=[field],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root)
+        return self.data_type(reference=reference)
+
+    def parse_root_type(  # noqa: PLR0912
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> DataType:
+        """Wrap any schema shape in a named root model holding a single field.
+
+        The inner data type is resolved by a branch ladder over the schema
+        (``$ref``, custom type, array, ``anyOf``/``oneOf``, patternProperties,
+        enum, plain type, falling back to Any). Returns a DataType referencing
+        the created root model, or an ``EmptyDataType`` when a union resolves
+        to nothing.
+        """
+        reference: Reference | None = None
+        if obj.ref:
+            data_type: DataType = self.get_ref_data_type(obj.ref)
+        elif obj.custom_type_path:
+            data_type = self.data_type_manager.get_data_type_from_full_path(
+                obj.custom_type_path, is_custom_type=True
+            )  # pragma: no cover
+        elif obj.is_array:
+            data_type = self.parse_array_fields(
+                name, obj, get_special_path("array", path)
+            ).data_type  # pragma: no cover
+        elif obj.anyOf or obj.oneOf:
+            # Register the name before parsing the union so nested members do
+            # not claim it first.
+            reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+            if obj.anyOf:
+                data_types: list[DataType] = self.parse_any_of(name, obj, get_special_path("anyOf", path))
+            else:
+                data_types = self.parse_one_of(name, obj, get_special_path("oneOf", path))
+
+            if len(data_types) > 1:  # pragma: no cover
+                data_type = self.data_type(data_types=data_types)
+            elif not data_types:  # pragma: no cover
+                return EmptyDataType()
+            else:  # pragma: no cover
+                data_type = data_types[0]
+        elif obj.patternProperties:
+            data_type = self.parse_pattern_properties(name, obj.patternProperties, path)
+        elif obj.enum:
+            if self.should_parse_enum_as_literal(obj):
+                data_type = self.parse_enum_as_literal(obj)
+            else:  # pragma: no cover
+                data_type = self.parse_enum(name, obj, path)
+        elif obj.type:
+            data_type = self.get_data_type(obj)
+        else:
+            data_type = self.data_type_manager.get_data_type(
+                Types.any,
+            )
+        if self.force_optional_for_required_fields:
+            required: bool = False
+        else:
+            required = not obj.nullable and not (obj.has_default and self.apply_default_values_for_required_fields)
+        if self.use_title_as_name and obj.title:
+            name = obj.title
+        if not reference:
+            reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        self.set_title(reference.path, obj)
+        self.set_additional_properties(reference.path, obj)
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=data_type,
+                    default=obj.default,
+                    required=required,
+                    constraints=obj.dict() if self.field_constraints else {},
+                    nullable=obj.nullable if self.strict_nullable else None,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    use_field_description=self.use_field_description,
+                    original_name=None,
+                    has_default=obj.has_default,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
+        )
+        self.results.append(data_model_root_type)
+        return self.data_type(reference=reference)
+
+    def parse_enum_as_literal(self, obj: JsonSchemaObject) -> DataType:
+        return self.data_type(literals=[i for i in obj.enum if i is not None])
+
+    def parse_enum(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        unique: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse an enum schema into an Enum model and return its DataType.
+
+        A ``null`` member on a string enum (valid only in OpenAPI) is handled
+        by generating the enum without the null and wrapping it in a nullable
+        root model. The ``unique`` argument is deprecated and ignored.
+        """
+        if not unique:  # pragma: no cover
+            warn(
+                f"{self.__class__.__name__}.parse_enum() ignore `unique` argument."
+                f"An object name must be unique."
+                f"This argument will be removed in a future version",
+                stacklevel=2,
+            )
+        enum_fields: list[DataModelFieldBase] = []
+
+        if None in obj.enum and obj.type == "string":
+            # Nullable is valid in only OpenAPI
+            nullable: bool = True
+            enum_times = [e for e in obj.enum if e is not None]
+        else:
+            # `enum_times` holds the enum member values (name is historical).
+            enum_times = obj.enum
+            nullable = False
+
+        exclude_field_names: set[str] = set()
+
+        for i, enum_part in enumerate(enum_times):
+            # Member name comes from x-enum-varnames when present; otherwise it
+            # is derived from the value (prefixed with the type for non-strings).
+            if obj.type == "string" or isinstance(enum_part, str):
+                default = f"'{enum_part.translate(escape_characters)}'" if isinstance(enum_part, str) else enum_part
+                field_name = obj.x_enum_varnames[i] if obj.x_enum_varnames else str(enum_part)
+            else:
+                default = enum_part
+                if obj.x_enum_varnames:
+                    field_name = obj.x_enum_varnames[i]
+                else:
+                    prefix = obj.type if isinstance(obj.type, str) else type(enum_part).__name__
+                    field_name = f"{prefix}_{enum_part}"
+            field_name = self.model_resolver.get_valid_field_name(
+                field_name, excludes=exclude_field_names, model_type=ModelType.ENUM
+            )
+            exclude_field_names.add(field_name)
+            enum_fields.append(
+                self.data_model_field_type(
+                    name=field_name,
+                    default=default,
+                    data_type=self.data_type_manager.get_data_type(
+                        Types.any,
+                    ),
+                    required=True,
+                    strip_default_none=self.strip_default_none,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    original_name=None,
+                )
+            )
+
+        def create_enum(reference_: Reference) -> DataType:
+            # Materialize the Enum model for `reference_`, record it in
+            # self.results, and return a DataType pointing at it.
+            enum = Enum(
+                reference=reference_,
+                fields=enum_fields,
+                path=self.current_source_path,
+                description=obj.description if self.use_schema_description else None,
+                custom_template_dir=self.custom_template_dir,
+                type_=_get_type(obj.type, obj.format) if self.use_subclass_enum and isinstance(obj.type, str) else None,
+                default=obj.default if obj.has_default else UNDEFINED,
+                treat_dot_as_module=self.treat_dot_as_module,
+            )
+            self.results.append(enum)
+            return self.data_type(reference=reference_)
+
+        if self.use_title_as_name and obj.title:
+            name = obj.title
+        reference = self.model_resolver.add(
+            path,
+            name,
+            class_name=True,
+            singular_name=singular_name,
+            singular_name_suffix="Enum",
+            loaded=True,
+        )
+
+        if not nullable:
+            return create_enum(reference)
+
+        # Nullable case: the enum itself gets a separate "<Name>Enum" reference
+        # and the original name becomes a root model with an optional field.
+        enum_reference = self.model_resolver.add(
+            [*path, "Enum"],
+            f"{reference.name}Enum",
+            class_name=True,
+            singular_name=singular_name,
+            singular_name_suffix="Enum",
+            loaded=True,
+        )
+
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=create_enum(enum_reference),
+                    default=obj.default,
+                    required=False,
+                    nullable=True,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    original_name=None,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            default=obj.default if obj.has_default else UNDEFINED,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root_type)
+        return self.data_type(reference=reference)
+
+    def _get_ref_body(self, resolved_ref: str) -> dict[Any, Any]:
+        if is_url(resolved_ref):
+            return self._get_ref_body_from_url(resolved_ref)
+        return self._get_ref_body_from_remote(resolved_ref)
+
+    def _get_ref_body_from_url(self, ref: str) -> dict[Any, Any]:
+        # URL Reference: $ref: 'http://path/to/your/resource' Uses the whole document located on the different server.
+        return self.remote_object_cache.get_or_put(
+            ref, default_factory=lambda key: load_yaml(self._get_text_from_url(key))
+        )
+
+    def _get_ref_body_from_remote(self, resolved_ref: str) -> dict[Any, Any]:
+        """Load a document referenced by a path relative to ``self.base_path``, with caching."""
+        # Remote Reference: $ref: 'document.json' Uses the whole document located on the same server and in
+        # the same location. TODO treat edge case
+        full_path = self.base_path / resolved_ref
+
+        # Cache key is the resolved path string; the factory ignores it and
+        # loads from `full_path` directly.
+        return self.remote_object_cache.get_or_put(
+            str(full_path),
+            default_factory=lambda _: load_yaml_from_path(full_path, self.encoding),
+        )
+
+    def resolve_ref(self, object_ref: str) -> Reference:
+        """Resolve ``object_ref`` to a Reference, loading remote documents on demand.
+
+        Local refs and refs scheduled for after-load are only recorded in
+        ``self.reserved_refs``; URL and file refs have their target document
+        fetched and parsed immediately, after which the reference is marked
+        loaded.
+        """
+        reference = self.model_resolver.add_ref(object_ref)
+        if reference.loaded:
+            return reference
+
+        # https://swagger.io/docs/specification/using-ref/
+        ref = self.model_resolver.resolve_ref(object_ref)
+        if get_ref_type(object_ref) == JSONReference.LOCAL:
+            # Local Reference: $ref: '#/definitions/myElement'
+            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)
+            return reference
+        if self.model_resolver.is_after_load(ref):
+            # Deferred: queued under the referenced document's root path.
+            self.reserved_refs[tuple(ref.split("#")[0].split("/"))].add(ref)
+            return reference
+
+        # Split the ref into the document locator and the JSON pointer part.
+        if is_url(ref):
+            relative_path, object_path = ref.split("#")
+            relative_paths = [relative_path]
+            base_path = None
+        else:
+            if self.model_resolver.is_external_root_ref(ref):
+                relative_path, object_path = ref[:-1], ""
+            else:
+                relative_path, object_path = ref.split("#")
+            relative_paths = relative_path.split("/")
+            base_path = Path(*relative_paths).parent
+        # Parse the referenced document with the resolver temporarily rooted
+        # at that document's own base path / URL.
+        with (
+            self.model_resolver.current_base_path_context(base_path),
+            self.model_resolver.base_url_context(relative_path),
+        ):
+            self._parse_file(
+                self._get_ref_body(relative_path),
+                self.model_resolver.add_ref(ref, resolved=True).name,
+                relative_paths,
+                object_path.split("/") if object_path else None,
+            )
+        reference.loaded = True
+        return reference
+
+    def parse_ref(self, obj: JsonSchemaObject, path: list[str]) -> None:  # noqa: PLR0912
+        """Recursively resolve every ``$ref`` reachable from ``obj``.
+
+        Walks items, additionalProperties, patternProperties, anyOf/allOf/oneOf
+        and properties; each ``$ref`` encountered is handed to ``resolve_ref``.
+        """
+        if obj.ref:
+            self.resolve_ref(obj.ref)
+        if obj.items:
+            # ``items`` may be a single schema or a positional list of schemas.
+            if isinstance(obj.items, JsonSchemaObject):
+                self.parse_ref(obj.items, path)
+            elif isinstance(obj.items, list):
+                for item in obj.items:
+                    self.parse_ref(item, path)
+        if isinstance(obj.additionalProperties, JsonSchemaObject):
+            self.parse_ref(obj.additionalProperties, path)
+        if obj.patternProperties:
+            for value in obj.patternProperties.values():
+                self.parse_ref(value, path)
+        for item in obj.anyOf:
+            self.parse_ref(item, path)
+        for item in obj.allOf:
+            self.parse_ref(item, path)
+        for item in obj.oneOf:
+            self.parse_ref(item, path)
+        if obj.properties:
+            for property_value in obj.properties.values():
+                if isinstance(property_value, JsonSchemaObject):
+                    self.parse_ref(property_value, path)
+
+    def parse_id(self, obj: JsonSchemaObject, path: list[str]) -> None:  # noqa: PLR0912
+        """Register the ``$id`` of ``obj`` and of every nested schema reachable from it.
+
+        NOTE(review): unlike ``parse_ref``, this does not descend into
+        ``oneOf`` items — confirm that omission is intentional.
+        """
+        if obj.id:
+            self.model_resolver.add_id(obj.id, path)
+        if obj.items:
+            # ``items`` may be a single schema or a positional list of schemas.
+            if isinstance(obj.items, JsonSchemaObject):
+                self.parse_id(obj.items, path)
+            elif isinstance(obj.items, list):
+                for item in obj.items:
+                    self.parse_id(item, path)
+        if isinstance(obj.additionalProperties, JsonSchemaObject):
+            self.parse_id(obj.additionalProperties, path)
+        if obj.patternProperties:
+            for value in obj.patternProperties.values():
+                self.parse_id(value, path)
+        for item in obj.anyOf:
+            self.parse_id(item, path)
+        for item in obj.allOf:
+            self.parse_id(item, path)
+        if obj.properties:
+            for property_value in obj.properties.values():
+                if isinstance(property_value, JsonSchemaObject):
+                    self.parse_id(property_value, path)
+
+    @contextmanager
+    def root_id_context(self, root_raw: dict[str, Any]) -> Generator[None, None, None]:
+        """Temporarily set ``self.root_id`` from the document's ``$id``; restore it on exit.
+
+        NOTE(review): the yield is not wrapped in try/finally, so the previous
+        root id is not restored if the body raises — confirm acceptable.
+        """
+        root_id: str | None = root_raw.get("$id")
+        previous_root_id: str | None = self.root_id
+        self.root_id = root_id or None
+        yield
+        self.root_id = previous_root_id
+
+    def parse_raw_obj(
+        self,
+        name: str,
+        raw: dict[str, Any],
+        path: list[str],
+    ) -> None:
+        """Validate ``raw`` into a schema object and parse it as ``name`` at ``path``."""
+        self.parse_obj(name, self.SCHEMA_OBJECT_TYPE.model_validate(raw), path)
+
+    def parse_obj(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> None:
+        """Parse one schema object, dispatching on its shape.
+
+        Order matters: array, allOf, oneOf/anyOf, properties,
+        patternProperties, explicit ``type: object``, enum, then a plain root
+        type. Nested ``$ref``s are resolved afterwards via ``parse_ref``.
+        """
+        if obj.is_array:
+            self.parse_array(name, obj, path)
+        elif obj.allOf:
+            self.parse_all_of(name, obj, path)
+        elif obj.oneOf or obj.anyOf:
+            data_type = self.parse_root_type(name, obj, path)
+            # An empty union with sibling properties falls back to an object model.
+            if isinstance(data_type, EmptyDataType) and obj.properties:
+                self.parse_object(name, obj, path)  # pragma: no cover
+        elif obj.properties:
+            self.parse_object(name, obj, path)
+        elif obj.patternProperties:
+            self.parse_root_type(name, obj, path)
+        elif obj.type == "object":
+            self.parse_object(name, obj, path)
+        elif obj.enum and not self.should_parse_enum_as_literal(obj):
+            self.parse_enum(name, obj, path)
+        else:
+            self.parse_root_type(name, obj, path)
+        self.parse_ref(obj, path)
+
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
+        """Yield each ``(source, path_parts)`` pair inside resolver base-path/root contexts.
+
+        For directory or multi-file input, all file paths are pre-registered as
+        "after load" files so cross-file refs can be deferred until parsed.
+        """
+        if isinstance(self.source, list) or (isinstance(self.source, Path) and self.source.is_dir()):
+            self.current_source_path = Path()
+            self.model_resolver.after_load_files = {
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
+            }
+
+        for source in self.iter_source:
+            if isinstance(self.source, ParseResult):
+                # URL input: derive path parts from the parsed URL, not the filesystem.
+                path_parts = self.get_url_path_parts(self.source)
+            else:
+                path_parts = list(source.path.parts)
+            if self.current_source_path is not None:
+                self.current_source_path = source.path
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                yield source, path_parts
+
+    def parse_raw(self) -> None:
+        """Load and parse every input source, then resolve deferred JSON pointers.
+
+        The root model name is, in priority order: ``class_name``, the
+        document's ``title``, or the literal ``"Model"``.
+        """
+        for source, path_parts in self._get_context_source_path_parts():
+            self.raw_obj = load_yaml(source.text)
+            if self.raw_obj is None:  # pragma: no cover
+                warn(f"{source.path} is empty. Skipping this file", stacklevel=2)
+                continue
+            if self.custom_class_name_generator:
+                obj_name = self.raw_obj.get("title", "Model")
+            else:
+                if self.class_name:
+                    obj_name = self.class_name
+                else:
+                    # backward compatible
+                    obj_name = self.raw_obj.get("title", "Model")
+                    if not self.model_resolver.validate_name(obj_name):
+                        obj_name = title_to_class_name(obj_name)
+                if not self.model_resolver.validate_name(obj_name):
+                    raise InvalidClassNameError(obj_name)
+            self._parse_file(self.raw_obj, obj_name, path_parts)
+
+        self._resolve_unparsed_json_pointer()
+
+    def _resolve_unparsed_json_pointer(self) -> None:
+        """Parse refs that were reserved but never loaded, recursing until stable.
+
+        Recursion terminates once a full pass produces no new models
+        (``self.results`` stops growing).
+        """
+        model_count: int = len(self.results)
+        for source in self.iter_source:
+            path_parts = list(source.path.parts)
+            reserved_refs = self.reserved_refs.get(tuple(path_parts))
+            if not reserved_refs:
+                continue
+            if self.current_source_path is not None:
+                self.current_source_path = source.path
+
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                for reserved_ref in sorted(reserved_refs):
+                    if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
+                        continue
+                    # for root model
+                    self.raw_obj = load_yaml(source.text)
+                    self.parse_json_pointer(self.raw_obj, reserved_ref, path_parts)
+
+        if model_count != len(self.results):
+            # New models were generated; try to resolve the JSON pointers again.
+            self._resolve_unparsed_json_pointer()
+
+    def parse_json_pointer(self, raw: dict[str, Any], ref: str, path_parts: list[str]) -> None:
+        """Parse the object addressed by the JSON pointer ``ref`` inside ``raw``.
+
+        The last pointer segment becomes the model name; the stored path keeps
+        the ``#/`` prefix on the first object segment.
+        """
+        path = ref.split("#", 1)[-1]
+        if path[0] == "/":  # pragma: no cover
+            path = path[1:]
+        object_paths = path.split("/")
+        models = get_model_by_path(raw, object_paths)
+        model_name = object_paths[-1]
+
+        self.parse_raw_obj(model_name, models, [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]])
+
+    def _parse_file(  # noqa: PLR0912
+        self,
+        raw: dict[str, Any],
+        obj_name: str,
+        path_parts: list[str],
+        object_paths: list[str] | None = None,
+    ) -> None:
+        """Parse one document, or one object inside it when ``object_paths`` is given.
+
+        Registers every ``$id`` first, then parses the target object, then the
+        schema-container definitions, and finally drains any refs reserved
+        while parsing this file.
+        """
+        object_paths = [o for o in object_paths or [] if o]
+        path = [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]] if object_paths else path_parts
+        with self.model_resolver.current_root_context(path_parts):
+            obj_name = self.model_resolver.add(path, obj_name, unique=False, class_name=True).name
+            with self.root_id_context(raw):
+                # Some JSON Schema documents carry a top-level 'self' attribute
+                # (version metadata); drop it before model validation.
+                raw.pop("self", None)
+                # parse $id before parsing $ref
+                root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
+                self.parse_id(root_obj, path_parts)
+                definitions: dict[Any, Any] | None = None
+                _schema_path = ""
+                # Pick the first schema container present in the document
+                # (e.g. #/definitions or #/components/schemas).
+                for _schema_path, split_schema_path in self.schema_paths:
+                    try:
+                        definitions = get_model_by_path(raw, split_schema_path)
+                        if definitions:
+                            break
+                    except KeyError:
+                        continue
+                if definitions is None:
+                    definitions = {}
+
+                for key, model in definitions.items():
+                    obj = self.SCHEMA_OBJECT_TYPE.parse_obj(model)
+                    self.parse_id(obj, [*path_parts, _schema_path, key])
+
+                if object_paths:
+                    models = get_model_by_path(raw, object_paths)
+                    model_name = object_paths[-1]
+                    self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
+                else:
+                    self.parse_obj(obj_name, root_obj, path_parts or ["#"])
+                for key, model in definitions.items():
+                    path = [*path_parts, _schema_path, key]
+                    reference = self.model_resolver.get(path)
+                    if not reference or not reference.loaded:
+                        self.parse_raw_obj(key, model, path)
+
+                key = tuple(path_parts)
+                reserved_refs = set(self.reserved_refs.get(key) or [])
+                # Drain refs reserved while handling this file; each pass may
+                # reserve more, so loop until the set stops changing.
+                while reserved_refs:
+                    for reserved_path in sorted(reserved_refs):
+                        reference = self.model_resolver.get(reserved_path)
+                        if not reference or reference.loaded:
+                            continue
+                        object_paths = reserved_path.split("#/", 1)[-1].split("/")
+                        path = reserved_path.split("/")
+                        models = get_model_by_path(raw, object_paths)
+                        model_name = object_paths[-1]
+                        self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
+                    previous_reserved_refs = reserved_refs
+                    reserved_refs = set(self.reserved_refs.get(key) or [])
+                    if previous_reserved_refs == reserved_refs:
+                        break
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/openapi.py 0.34.0-1/src/datamodel_code_generator/parser/openapi.py
--- 0.26.4-3/src/datamodel_code_generator/parser/openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/parser/openapi.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,643 @@
+from __future__ import annotations
+
+import re
+from collections import defaultdict
+from enum import Enum
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Union
+from warnings import warn
+
+from pydantic import Field
+
+from datamodel_code_generator import (
+    Error,
+    LiteralType,
+    OpenAPIScope,
+    PythonVersion,
+    PythonVersionMin,
+    load_yaml,
+    snooper_to_methods,
+)
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.parser import DefaultPutDict  # noqa: TC001 # needed for type check
+from datamodel_code_generator.parser.base import get_special_path
+from datamodel_code_generator.parser.jsonschema import (
+    JsonSchemaObject,
+    JsonSchemaParser,
+    get_model_by_path,
+)
+from datamodel_code_generator.reference import snake_to_upper_camel
+from datamodel_code_generator.types import (
+    DataType,
+    DataTypeManager,
+    EmptyDataType,
+    StrictTypes,
+)
+from datamodel_code_generator.util import BaseModel
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Mapping, Sequence
+    from pathlib import Path
+    from urllib.parse import ParseResult
+
+
+# Matches JSON-flavoured media types such as application/json and application/hal+json.
+RE_APPLICATION_JSON_PATTERN: Pattern[str] = re.compile(r"^application/.*json$")
+
+# HTTP methods recognised as operations inside an OpenAPI path item.
+OPERATION_NAMES: list[str] = [
+    "get",
+    "put",
+    "post",
+    "delete",
+    "patch",
+    "head",
+    "options",
+    "trace",
+]
+
+
+class ParameterLocation(Enum):
+    """Valid values of the OpenAPI parameter ``in`` field."""
+
+    query = "query"
+    header = "header"
+    path = "path"
+    cookie = "cookie"
+
+
+BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
+
+
+class ReferenceObject(BaseModel):
+    """OpenAPI Reference object: a bare ``$ref`` to another component."""
+
+    ref: str = Field(..., alias="$ref")
+
+
+class ExampleObject(BaseModel):
+    """OpenAPI Example object."""
+
+    summary: Optional[str] = None  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    value: Any = None
+    externalValue: Optional[str] = None  # noqa: N815, UP045
+
+
+class MediaObject(BaseModel):
+    """OpenAPI Media Type object: the schema/examples for one content type."""
+
+    schema_: Optional[Union[ReferenceObject, JsonSchemaObject]] = Field(None, alias="schema")  # noqa: UP007, UP045
+    example: Any = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+
+
+class ParameterObject(BaseModel):
+    """OpenAPI Parameter object (query/header/path/cookie parameter)."""
+
+    name: Optional[str] = None  # noqa: UP045
+    in_: Optional[ParameterLocation] = Field(None, alias="in")  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    required: bool = False
+    deprecated: bool = False
+    schema_: Optional[JsonSchemaObject] = Field(None, alias="schema")  # noqa: UP045
+    example: Any = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+
+
+class HeaderObject(BaseModel):
+    """OpenAPI Header object (like a parameter, but without ``name``/``in``)."""
+
+    description: Optional[str] = None  # noqa: UP045
+    required: bool = False
+    deprecated: bool = False
+    schema_: Optional[JsonSchemaObject] = Field(None, alias="schema")  # noqa: UP045
+    example: Any = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+
+
+class RequestBodyObject(BaseModel):
+    """OpenAPI Request Body object: per-media-type content of a request."""
+
+    description: Optional[str] = None  # noqa: UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+    required: bool = False
+
+
+class ResponseObject(BaseModel):
+    """OpenAPI Response object: headers and per-media-type content."""
+
+    description: Optional[str] = None  # noqa: UP045
+    headers: dict[str, ParameterObject] = {}  # noqa: RUF012
+    content: dict[Union[str, int], MediaObject] = {}  # noqa: RUF012, UP007
+
+
+class Operation(BaseModel):
+    """OpenAPI Operation object: one HTTP method on a path item."""
+
+    tags: list[str] = []  # noqa: RUF012
+    summary: Optional[str] = None  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    operationId: Optional[str] = None  # noqa: N815, UP045
+    parameters: list[Union[ReferenceObject, ParameterObject]] = []  # noqa: RUF012, UP007
+    requestBody: Optional[Union[ReferenceObject, RequestBodyObject]] = None  # noqa: N815, UP007, UP045
+    responses: dict[Union[str, int], Union[ReferenceObject, ResponseObject]] = {}  # noqa: RUF012, UP007
+    deprecated: bool = False
+
+
+class ComponentsObject(BaseModel):
+    """OpenAPI Components object: the reusable pieces referenced by ``$ref``."""
+
+    schemas: dict[str, Union[ReferenceObject, JsonSchemaObject]] = {}  # noqa: RUF012, UP007
+    responses: dict[str, Union[ReferenceObject, ResponseObject]] = {}  # noqa: RUF012, UP007
+    examples: dict[str, Union[ReferenceObject, ExampleObject]] = {}  # noqa: RUF012, UP007
+    requestBodies: dict[str, Union[ReferenceObject, RequestBodyObject]] = {}  # noqa: N815, RUF012, UP007
+    headers: dict[str, Union[ReferenceObject, HeaderObject]] = {}  # noqa: RUF012, UP007
+
+
+@snooper_to_methods()
+class OpenAPIParser(JsonSchemaParser):
+    SCHEMA_PATHS: ClassVar[list[str]] = ["#/components/schemas"]
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | list[Path] | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        apply_default_values_for_required_fields: bool = False,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        use_one_literal_as_default: bool = False,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        openapi_scopes: list[OpenAPIScope] | None = None,
+        include_path_parameters: bool = False,
+        wrap_string_literal: bool | None = False,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType | None = None,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+    ) -> None:
+        """Create an OpenAPI parser.
+
+        ``target_datetime_class`` defaults to ``DatetimeClassType.Awaredatetime``
+        when not given. ``openapi_scopes`` (default ``[OpenAPIScope.Schemas]``)
+        and ``include_path_parameters`` are kept on this instance; every other
+        option is forwarded unchanged to ``JsonSchemaParser``.
+        """
+        target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+        )
+        self.open_api_scopes: list[OpenAPIScope] = openapi_scopes or [OpenAPIScope.Schemas]
+        self.include_path_parameters: bool = include_path_parameters
+
+    def get_ref_model(self, ref: str) -> dict[str, Any]:
+        """Return the raw dict the reference ``ref`` points to.
+
+        An empty file part means the ref targets the current document
+        (``self.raw_obj``); otherwise the referenced file body is fetched.
+        """
+        ref_file, ref_path = self.model_resolver.resolve_ref(ref).split("#", 1)
+        ref_body = self._get_ref_body(ref_file) if ref_file else self.raw_obj
+        return get_model_by_path(ref_body, ref_path.split("/")[1:])
+
+    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
+        """Return the data type for ``obj``, folding ``nullable`` into the type list.
+
+        Only applies when ``strict_nullable`` is set and ``obj.type`` is a
+        single string, mirroring OpenAPI 3.1's ``type: [..., "null"]`` form.
+        """
+        # OpenAPI 3.0 doesn't allow `null` in the `type` field and list of types
+        # https://swagger.io/docs/specification/data-models/data-types/#null
+        # OpenAPI 3.1 does allow `null` in the `type` field and is equivalent to
+        # a `nullable` flag on the property itself
+        if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
+            obj.type = [obj.type, "null"]
+
+        return super().get_data_type(obj)
+
+    def resolve_object(self, obj: ReferenceObject | BaseModelT, object_type: type[BaseModelT]) -> BaseModelT:
+        """Return ``obj`` itself, or — if it is a ``$ref`` — the dereferenced ``object_type``."""
+        if isinstance(obj, ReferenceObject):
+            ref_obj = self.get_ref_model(obj.ref)
+            return object_type.parse_obj(ref_obj)
+        return obj
+
+    def parse_schema(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> DataType:
+        """Parse an inline schema from an operation and return its data type.
+
+        Dispatches like ``parse_obj`` (array, allOf, oneOf/anyOf, object,
+        enum, ``$ref``, plain type) and resolves nested refs afterwards.
+        """
+        if obj.is_array:
+            data_type = self.parse_array(name, obj, [*path, name])
+        elif obj.allOf:  # pragma: no cover
+            data_type = self.parse_all_of(name, obj, path)
+        elif obj.oneOf or obj.anyOf:  # pragma: no cover
+            data_type = self.parse_root_type(name, obj, path)
+            # An empty union with sibling properties falls back to an object model.
+            if isinstance(data_type, EmptyDataType) and obj.properties:
+                self.parse_object(name, obj, path)
+        elif obj.is_object:
+            data_type = self.parse_object(name, obj, path)
+        elif obj.enum:  # pragma: no cover
+            data_type = self.parse_enum(name, obj, path)
+        elif obj.ref:  # pragma: no cover
+            data_type = self.get_ref_data_type(obj.ref)
+        else:
+            data_type = self.get_data_type(obj)
+        self.parse_ref(obj, path)
+        return data_type
+
+    def parse_request_body(
+        self,
+        name: str,
+        request_body: RequestBodyObject,
+        path: list[str],
+    ) -> dict[str, DataType]:
+        """Parse every media type of a request body; returns media type -> data type.
+
+        Media entries without any schema are skipped.
+        """
+        data_types: dict[str, DataType] = {}
+        for (
+            media_type,
+            media_obj,
+        ) in request_body.content.items():
+            if isinstance(media_obj.schema_, JsonSchemaObject):
+                data_types[media_type] = self.parse_schema(name, media_obj.schema_, [*path, media_type])
+            elif media_obj.schema_ is not None:
+                # ReferenceObject: resolve to an existing model's type.
+                data_types[media_type] = self.get_ref_data_type(media_obj.schema_.ref)
+        return data_types
+
+    def parse_responses(
+        self,
+        name: str,
+        responses: dict[str | int, ReferenceObject | ResponseObject],
+        path: list[str],
+    ) -> dict[str | int, dict[str, DataType]]:
+        """Parse response schemas; returns status code -> media type -> data type.
+
+        ``$ref`` responses are dereferenced first. When
+        ``allow_responses_without_content`` is set, a content-less response is
+        recorded as ``application/json -> None``.
+        """
+        data_types: defaultdict[str | int, dict[str, DataType]] = defaultdict(dict)
+        for status_code, detail in responses.items():
+            if isinstance(detail, ReferenceObject):
+                if not detail.ref:  # pragma: no cover
+                    continue
+                ref_model = self.get_ref_model(detail.ref)
+                content = {k: MediaObject.parse_obj(v) for k, v in ref_model.get("content", {}).items()}
+            else:
+                content = detail.content
+
+            if self.allow_responses_without_content and not content:
+                data_types[status_code]["application/json"] = DataType(type="None")
+
+            for content_type, obj in content.items():
+                object_schema = obj.schema_
+                if not object_schema:  # pragma: no cover
+                    continue
+                if isinstance(object_schema, JsonSchemaObject):
+                    data_types[status_code][content_type] = self.parse_schema(  # pyright: ignore[reportArgumentType]
+                        name,
+                        object_schema,
+                        [*path, str(status_code), content_type],  # pyright: ignore[reportArgumentType]
+                    )
+                else:
+                    # ReferenceObject schema: reuse the referenced model's type.
+                    data_types[status_code][content_type] = self.get_ref_data_type(  # pyright: ignore[reportArgumentType]
+                        object_schema.ref
+                    )
+
+        return data_types
+
+    @classmethod
+    def parse_tags(
+        cls,
+        name: str,  # noqa: ARG003
+        tags: list[str],
+        path: list[str],  # noqa: ARG003
+    ) -> list[str]:
+        """Hook for subclasses; the base implementation returns ``tags`` unchanged."""
+        return tags
+
+    @classmethod
+    def _get_model_name(cls, path_name: str, method: str, suffix: str) -> str:
+        """Build a class name such as ``FooBarGetResponse`` from path, method and suffix."""
+        camel_path_name = snake_to_upper_camel(path_name.replace("/", "_"))
+        return f"{camel_path_name}{method.capitalize()}{suffix}"
+
+    def parse_all_parameters(
+        self,
+        name: str,
+        parameters: list[ReferenceObject | ParameterObject],
+        path: list[str],
+    ) -> DataType | None:
+        """Collect an operation's query (and optionally path) parameters into one model.
+
+        Returns the model's data type when the ``Parameters`` scope is enabled
+        and at least one field was produced; otherwise ``None``. Raises on
+        duplicate parameter names.
+        """
+        fields: list[DataModelFieldBase] = []
+        exclude_field_names: set[str] = set()
+        reference = self.model_resolver.add(path, name, class_name=True, unique=True)
+        for parameter_ in parameters:
+            parameter = self.resolve_object(parameter_, ParameterObject)
+            parameter_name = parameter.name
+            # Only query/path parameters are modeled; path ones only when enabled.
+            if (
+                not parameter_name
+                or parameter.in_ not in {ParameterLocation.query, ParameterLocation.path}
+                or (parameter.in_ == ParameterLocation.path and not self.include_path_parameters)
+            ):
+                continue
+
+            if any(field.original_name == parameter_name for field in fields):
+                msg = f"Parameter name '{parameter_name}' is used more than once."
+                raise Exception(msg)  # noqa: TRY002
+
+            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                field_name=parameter_name, excludes=exclude_field_names
+            )
+            if parameter.schema_:
+                fields.append(
+                    self.get_object_field(
+                        field_name=field_name,
+                        field=parameter.schema_,
+                        field_type=self.parse_item(field_name, parameter.schema_, [*path, name, parameter_name]),
+                        original_field_name=parameter_name,
+                        required=parameter.required,
+                        alias=alias,
+                    )
+                )
+            else:
+                # Parameter defined via ``content`` media types instead of ``schema``.
+                data_types: list[DataType] = []
+                object_schema: JsonSchemaObject | None = None
+                for (
+                    media_type,
+                    media_obj,
+                ) in parameter.content.items():
+                    if not media_obj.schema_:
+                        continue
+                    object_schema = self.resolve_object(media_obj.schema_, JsonSchemaObject)
+                    data_types.append(
+                        self.parse_item(
+                            field_name,
+                            object_schema,
+                            [*path, name, parameter_name, media_type],
+                        )
+                    )
+
+                if not data_types:
+                    continue
+                if len(data_types) == 1:
+                    data_type = data_types[0]
+                else:
+                    data_type = self.data_type(data_types=data_types)
+                    # multiple data_type parse as non-constraints field
+                    object_schema = None
+                fields.append(
+                    self.data_model_field_type(
+                        name=field_name,
+                        default=object_schema.default if object_schema else None,
+                        data_type=data_type,
+                        required=parameter.required,
+                        alias=alias,
+                        constraints=object_schema.dict()
+                        if object_schema and self.is_constraints_field(object_schema)
+                        else None,
+                        nullable=object_schema.nullable
+                        if object_schema and self.strict_nullable and (object_schema.has_default or parameter.required)
+                        else None,
+                        strip_default_none=self.strip_default_none,
+                        extras=self.get_field_extras(object_schema) if object_schema else {},
+                        use_annotated=self.use_annotated,
+                        use_field_description=self.use_field_description,
+                        use_default_kwarg=self.use_default_kwarg,
+                        original_name=parameter_name,
+                        has_default=object_schema.has_default if object_schema else False,
+                        type_has_null=object_schema.type_has_null if object_schema else None,
+                    )
+                )
+
+        if OpenAPIScope.Parameters in self.open_api_scopes and fields:
+            # Using _create_data_model from parent class JsonSchemaParser
+            # This method automatically adds frozen=True for DataClass types
+            self.results.append(
+                self._create_data_model(
+                    fields=fields,
+                    reference=reference,
+                    custom_base_class=self.base_class,
+                    custom_template_dir=self.custom_template_dir,
+                    keyword_only=self.keyword_only,
+                    treat_dot_as_module=self.treat_dot_as_module,
+                )
+            )
+            return self.data_type(reference=reference)
+
+        return None
+
+    def parse_operation(
+        self,
+        raw_operation: dict[str, Any],
+        path: list[str],
+    ) -> None:
+        operation = Operation.parse_obj(raw_operation)
+        path_name, method = path[-2:]
+        if self.use_operation_id_as_name:
+            if not operation.operationId:
+                msg = (
+                    f"All operations must have an operationId when --use_operation_id_as_name is set."
+                    f"The following path was missing an operationId: {path_name}"
+                )
+                raise Error(msg)
+            path_name = operation.operationId
+            method = ""
+        self.parse_all_parameters(
+            self._get_model_name(
+                path_name, method, suffix="Parameters" if self.include_path_parameters else "ParametersQuery"
+            ),
+            operation.parameters,
+            [*path, "parameters"],
+        )
+        if operation.requestBody:
+            if isinstance(operation.requestBody, ReferenceObject):
+                ref_model = self.get_ref_model(operation.requestBody.ref)
+                request_body = RequestBodyObject.parse_obj(ref_model)
+            else:
+                request_body = operation.requestBody
+            self.parse_request_body(
+                name=self._get_model_name(path_name, method, suffix="Request"),
+                request_body=request_body,
+                path=[*path, "requestBody"],
+            )
+        self.parse_responses(
+            name=self._get_model_name(path_name, method, suffix="Response"),
+            responses=operation.responses,
+            path=[*path, "responses"],
+        )
+        if OpenAPIScope.Tags in self.open_api_scopes:
+            self.parse_tags(
+                name=self._get_model_name(path_name, method, suffix="Tags"),
+                tags=operation.tags,
+                path=[*path, "tags"],
+            )
+
    def parse_raw(self) -> None:  # noqa: PLR0912
        """Parse every input document, emitting models for each enabled scope
        (Schemas, Paths, Tags) in ``self.open_api_scopes``."""
        for source, path_parts in self._get_context_source_path_parts():  # noqa: PLR1702
            if self.validation:
                warn(
                    "Deprecated: `--validation` option is deprecated. the option will be removed in a future "
                    "release. please use another tool to validate OpenAPI.\n",
                    stacklevel=2,
                )

                try:
                    from prance import BaseParser  # noqa: PLC0415

                    # Constructing the parser triggers spec validation; the
                    # parser instance itself is discarded.
                    BaseParser(
                        spec_string=source.text,
                        backend="openapi-spec-validator",
                        encoding=self.encoding,
                    )
                except ImportError:  # pragma: no cover
                    warn(
                        "Warning: Validation was skipped for OpenAPI. `prance` or `openapi-spec-validator` are not "
                        "installed.\n"
                        "To use --validation option after datamodel-code-generator 0.24.0, Please run `$pip install "
                        "'datamodel-code-generator[validation]'`.\n",
                        stacklevel=2,
                    )

            specification: dict[str, Any] = load_yaml(source.text)
            self.raw_obj = specification
            schemas: dict[Any, Any] = specification.get("components", {}).get("schemas", {})
            # Top-level security requirements are inherited below by any
            # operation that does not declare its own.
            security: list[dict[str, list[str]]] | None = specification.get("security")
            if OpenAPIScope.Schemas in self.open_api_scopes:
                for (
                    obj_name,
                    raw_obj,
                ) in schemas.items():
                    self.parse_raw_obj(
                        obj_name,
                        raw_obj,
                        [*path_parts, "#/components", "schemas", obj_name],
                    )
            if OpenAPIScope.Paths in self.open_api_scopes:
                paths: dict[str, dict[str, Any]] = specification.get("paths", {})
                # NOTE(review): this reads a "parameters" key directly under
                # "paths"; standard OpenAPI puts parameters on path *items*,
                # so confirm whether this targets a spec extension.
                parameters: list[dict[str, Any]] = [
                    self._get_ref_body(p["$ref"]) if "$ref" in p else p
                    for p in paths.get("parameters", [])
                    if isinstance(p, dict)
                ]
                paths_path = [*path_parts, "#/paths"]
                for path_name, methods_ in paths.items():
                    # Resolve path items if applicable
                    methods = self.get_ref_model(methods_["$ref"]) if "$ref" in methods_ else methods_
                    # Path-item-level parameters are shared by every operation
                    # beneath this path.
                    paths_parameters = parameters.copy()
                    if "parameters" in methods:
                        paths_parameters.extend(methods["parameters"])
                    relative_path_name = path_name[1:]
                    if relative_path_name:
                        path = [*paths_path, relative_path_name]
                    else:  # pragma: no cover
                        path = get_special_path("root", paths_path)
                    for operation_name, raw_operation in methods.items():
                        if operation_name not in OPERATION_NAMES:
                            continue
                        if paths_parameters:
                            if "parameters" in raw_operation:  # pragma: no cover
                                raw_operation["parameters"].extend(paths_parameters)
                            else:
                                raw_operation["parameters"] = paths_parameters
                        if security is not None and "security" not in raw_operation:
                            raw_operation["security"] = security
                        self.parse_operation(
                            raw_operation,
                            [*path, operation_name],
                        )

        self._resolve_unparsed_json_pointer()
diff -pruN 0.26.4-3/src/datamodel_code_generator/pydantic_patch.py 0.34.0-1/src/datamodel_code_generator/pydantic_patch.py
--- 0.26.4-3/src/datamodel_code_generator/pydantic_patch.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/pydantic_patch.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import sys
+from typing import Any
+
+import pydantic.typing
+
+
def patched_evaluate_forwardref(
    forward_ref: Any, globalns: dict[str, Any], localns: dict[str, Any] | None = None
) -> Any:  # pragma: no cover
    """Evaluate a ``typing.ForwardRef`` across CPython ``_evaluate`` signature changes.

    Python 3.12.4+ made ``recursive_guard`` keyword-only, which breaks
    pydantic v1's ``evaluate_forwardref``.  Try the older positional call
    first, then fall back to the keyword form.

    Fix: the original annotated the return type as ``None`` even though the
    evaluated type object is returned; corrected to ``Any``.
    """
    try:
        return forward_ref._evaluate(globalns, localns or None, set())  # pragma: no cover  # noqa: SLF001
    except TypeError:
        # Fallback for CPython versions where recursive_guard is keyword-only
        return forward_ref._evaluate(globalns, localns or None, set(), recursive_guard=set())  # noqa: SLF001
+
+
# Install the patch on Python 3.12 and newer, where ForwardRef._evaluate's
# signature changed; older versions keep pydantic's original implementation.
if sys.version_info >= (3, 12):
    pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref  # pyright: ignore[reportAttributeAccessIssue]
diff -pruN 0.26.4-3/src/datamodel_code_generator/reference.py 0.34.0-1/src/datamodel_code_generator/reference.py
--- 0.26.4-3/src/datamodel_code_generator/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,695 @@
+from __future__ import annotations
+
+import re
+from collections import defaultdict
+from contextlib import contextmanager
+from enum import Enum, auto
+from functools import cached_property, lru_cache
+from itertools import zip_longest
+from keyword import iskeyword
+from pathlib import Path, PurePath
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, NamedTuple, Optional, TypeVar
+from urllib.parse import ParseResult, urlparse
+
+import inflect
+import pydantic
+from packaging import version
+from pydantic import BaseModel
+
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, model_validator
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, Mapping, Sequence
+    from collections.abc import Set as AbstractSet
+
+    from pydantic.typing import DictStrAny
+
+
class _BaseModel(BaseModel):
    """Base model that lets subclasses exclude fields from ``dict()`` output
    and force-assign selected raw input values after validation."""

    # Field names always removed from serialised output.
    _exclude_fields: ClassVar[set[str]] = set()
    # Field names re-assigned verbatim from the raw input after validation.
    _pass_fields: ClassVar[set[str]] = set()

    if not TYPE_CHECKING:

        def __init__(self, **values: Any) -> None:
            super().__init__(**values)
            # Overwrite pass-through fields with the unvalidated input values.
            for pass_field_name in self._pass_fields:
                if pass_field_name in values:
                    setattr(self, pass_field_name, values[pass_field_name])

    if not TYPE_CHECKING:
        if PYDANTIC_V2:

            # Pydantic v2 renamed dict() to model_dump(); keep the v1-style
            # dict() API and merge _exclude_fields into the exclusion set.
            def dict(  # noqa: PLR0913
                self,
                *,
                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                by_alias: bool = False,
                exclude_unset: bool = False,
                exclude_defaults: bool = False,
                exclude_none: bool = False,
            ) -> DictStrAny:
                return self.model_dump(
                    include=include,
                    exclude=set(exclude or ()) | self._exclude_fields,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                )

        else:

            # Pydantic v1 path: same merge of _exclude_fields, plus v1's
            # skip_defaults parameter.
            def dict(  # noqa: PLR0913
                self,
                *,
                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                by_alias: bool = False,
                skip_defaults: bool | None = None,
                exclude_unset: bool = False,
                exclude_defaults: bool = False,
                exclude_none: bool = False,
            ) -> DictStrAny:
                return super().dict(
                    include=include,
                    exclude=set(exclude or ()) | self._exclude_fields,
                    by_alias=by_alias,
                    skip_defaults=skip_defaults,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                )
+
+
class Reference(_BaseModel):
    """A registry entry tying a resolved JSON path to a generated model name."""

    # path: canonical "file#/pointer" location of the referenced definition.
    path: str
    # original_name: name as it appeared in the source document.
    original_name: str = ""
    # name: generated (valid Python) name for the model.
    name: str
    duplicate_name: Optional[str] = None  # noqa: UP045
    # loaded: False until the referenced definition has been parsed.
    loaded: bool = True
    source: Optional[Any] = None  # noqa: UP045
    # children holds back-references; excluded from dict() to avoid cycles.
    children: list[Any] = []
    _exclude_fields: ClassVar[set[str]] = {"children"}

    @model_validator(mode="before")
    def validate_original_name(cls, values: Any) -> Any:  # noqa: N805
        """
        If original_name is empty then, `original_name` is assigned `name`
        """
        if not isinstance(values, dict):  # pragma: no cover
            return values
        original_name = values.get("original_name")
        if original_name:
            return values

        values["original_name"] = values.get("name", original_name)
        return values

    if PYDANTIC_V2:
        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
            arbitrary_types_allowed=True,
            ignored_types=(cached_property,),
            revalidate_instances="never",
        )
    else:

        class Config:
            arbitrary_types_allowed = True
            keep_untouched = (cached_property,)
            # pydantic < 1.9.2 expects a bool here; newer v1 accepts "none".
            copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"

    @property
    def short_name(self) -> str:
        """Name without any dotted module/class prefix."""
        return self.name.rsplit(".", 1)[-1]
+
+
# Suffix appended when deriving a singular class name (e.g. list items).
SINGULAR_NAME_SUFFIX: str = "Item"

# Matches "#anchor"-style ids (a "#" not followed by "/", i.e. not a pointer).
ID_PATTERN: Pattern[str] = re.compile(r"^#[^/].*")
+
T = TypeVar("T")


@contextmanager
def context_variable(setter: Callable[[T], None], current_value: T, new_value: T) -> Generator[None, None, None]:
    """Temporarily apply *new_value* through *setter*, restoring
    *current_value* when the context exits (even on error)."""
    setter(new_value)
    try:
        yield
    finally:
        setter(current_value)
+
+
_UNDER_SCORE_1: Pattern[str] = re.compile(r"([^_])([A-Z][a-z]+)")
_UNDER_SCORE_2: Pattern[str] = re.compile(r"([a-z0-9])([A-Z])")


@lru_cache
def camel_to_snake(string: str) -> str:
    """Convert a CamelCase identifier to snake_case (results are cached)."""
    # Insert "_" at word boundaries in two passes, then lower-case everything.
    return _UNDER_SCORE_2.sub(r"\1_\2", _UNDER_SCORE_1.sub(r"\1_\2", string)).lower()
+
+
class FieldNameResolver:
    """Turns arbitrary schema member names into valid Python identifiers,
    honouring aliases, snake_case conversion, keyword avoidance and
    exclusion sets."""

    def __init__(  # noqa: PLR0913, PLR0917
        self,
        aliases: Mapping[str, str] | None = None,
        snake_case_field: bool = False,  # noqa: FBT001, FBT002
        empty_field_name: str | None = None,
        original_delimiter: str | None = None,
        special_field_name_prefix: str | None = None,
        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
        no_alias: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
        self.empty_field_name: str = empty_field_name or "_"
        self.snake_case_field = snake_case_field
        self.original_delimiter: str | None = original_delimiter
        # "field" is the historical default prefix for names that need one.
        self.special_field_name_prefix: str | None = (
            "field" if special_field_name_prefix is None else special_field_name_prefix
        )
        self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
        self.capitalise_enum_members: bool = capitalise_enum_members
        self.no_alias = no_alias

    @classmethod
    def _validate_field_name(cls, field_name: str) -> bool:  # noqa: ARG003
        # Hook for subclasses to veto names clashing with model machinery.
        return True

    def get_valid_name(  # noqa: PLR0912
        self,
        name: str,
        excludes: set[str] | None = None,
        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
        upper_camel: bool = False,  # noqa: FBT001, FBT002
    ) -> str:
        """Return *name* transformed into a safe identifier that avoids
        keywords, resolver-vetoed names and anything in *excludes*."""
        if not name:
            name = self.empty_field_name
        if name[0] == "#":
            # Strip a leading "#" (JSON-pointer/anchor residue).
            name = name[1:] or self.empty_field_name

        if self.snake_case_field and not ignore_snake_case_field and self.original_delimiter is not None:
            # Normalise delimiter-separated names to CamelCase first so the
            # later camel_to_snake pass produces consistent output.
            name = snake_to_upper_camel(name, delimiter=self.original_delimiter)

        # Replace superscript digits and all non-word characters with "_".
        name = re.sub(r"[¹²³⁴⁵⁶⁷⁸⁹]|\W", "_", name)
        if name[0].isnumeric():
            name = f"{self.special_field_name_prefix}_{name}"

        # We should avoid having a field begin with an underscore, as it
        # causes pydantic to consider it as private
        while name.startswith("_"):
            if self.remove_special_field_name_prefix:
                name = name[1:]
            else:
                name = f"{self.special_field_name_prefix}{name}"
                break
        if self.capitalise_enum_members or (self.snake_case_field and not ignore_snake_case_field):
            name = camel_to_snake(name)
        count = 1
        if iskeyword(name) or not self._validate_field_name(name):
            name += "_"
        if upper_camel:
            new_name = snake_to_upper_camel(name)
        elif self.capitalise_enum_members:
            new_name = name.upper()
        else:
            new_name = name
        # Append a numeric suffix until the candidate is acceptable.
        # NOTE(review): the `or not self._validate_field_name(new_name)`
        # clause lets a resolver-vetoed name escape this loop; `and` may have
        # been intended — confirm before changing.
        while (
            not (new_name.isidentifier() or not self._validate_field_name(new_name))
            or iskeyword(new_name)
            or (excludes and new_name in excludes)
        ):
            new_name = f"{name}{count}" if upper_camel else f"{name}_{count}"
            count += 1
        return new_name

    def get_valid_field_name_and_alias(
        self, field_name: str, excludes: set[str] | None = None
    ) -> tuple[str, str | None]:
        """Return ``(valid_name, alias)`` where *alias* is the original name
        when it had to change (or None when unchanged / aliasing disabled)."""
        if field_name in self.aliases:
            return self.aliases[field_name], field_name
        valid_name = self.get_valid_name(field_name, excludes=excludes)
        return (
            valid_name,
            None if self.no_alias or field_name == valid_name else field_name,
        )
+
+
class PydanticFieldNameResolver(FieldNameResolver):
    """Resolver that additionally rejects names shadowing pydantic's
    ``BaseModel`` attributes (e.g. ``dict``, ``json``, ``schema``)."""

    @classmethod
    def _validate_field_name(cls, field_name: str) -> bool:
        # TODO: Support Pydantic V2
        return not hasattr(BaseModel, field_name)
+
+
class EnumFieldNameResolver(FieldNameResolver):
    """Resolver for enum members; prevents clashes with ``Enum.mro``."""

    def get_valid_name(
        self,
        name: str,
        excludes: set[str] | None = None,
        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
        upper_camel: bool = False,  # noqa: FBT001, FBT002
    ) -> str:
        # "mro" would shadow type.mro() on the generated Enum class, so the
        # member is pre-renamed and "mro" is always excluded as a candidate.
        effective_name = "mro_" if name == "mro" else name
        effective_excludes = {"mro"} | (excludes or set())
        return super().get_valid_name(
            name=effective_name,
            excludes=effective_excludes,
            ignore_snake_case_field=ignore_snake_case_field,
            upper_camel=upper_camel,
        )
+
+
class ModelType(Enum):
    """Kind of generated model a field name is being resolved for."""

    PYDANTIC = auto()
    ENUM = auto()
    CLASS = auto()


# Default resolver class per model type; overridable via ModelResolver's
# field_name_resolver_classes argument.
DEFAULT_FIELD_NAME_RESOLVERS: dict[ModelType, type[FieldNameResolver]] = {
    ModelType.ENUM: EnumFieldNameResolver,
    ModelType.PYDANTIC: PydanticFieldNameResolver,
    ModelType.CLASS: FieldNameResolver,
}
+
+
class ClassName(NamedTuple):
    """A resolved class name plus the name that was displaced when
    deduplication renamed it (None when no rename occurred)."""

    name: str
    duplicate_name: str | None
+
+
def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
    """Compute the path from *base_path* to *target_path*.

    Identical paths yield ``Path()``; a relative *target_path* is returned
    unchanged; otherwise the result climbs out of the non-shared tail of
    *base_path* with ``..`` segments and descends into *target_path*.
    """
    if base_path == target_path:
        return Path()
    if not target_path.is_absolute():
        return target_path
    base_parts = base_path.parts
    target_parts = target_path.parts
    # Length of the shared leading prefix of both paths.
    common = 0
    for base_part, target_part in zip(base_parts, target_parts):
        if base_part != target_part:
            break
        common += 1
    ups = len(base_parts) - common
    return Path(*[".." for _ in range(ups)], *target_parts[common:])
+
+
+class ModelResolver:  # noqa: PLR0904
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        exclude_names: set[str] | None = None,
+        duplicate_name_suffix: str | None = None,
+        base_url: str | None = None,
+        singular_name_suffix: str | None = None,
+        aliases: Mapping[str, str] | None = None,
+        snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        empty_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        base_path: Path | None = None,
+        field_name_resolver_classes: dict[ModelType, type[FieldNameResolver]] | None = None,
+        original_field_name_delimiter: str | None = None,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
+        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
+        no_alias: bool = False,  # noqa: FBT001, FBT002
+        remove_suffix_number: bool = False,  # noqa: FBT001, FBT002
+        parent_scoped_naming: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        self.references: dict[str, Reference] = {}
+        self._current_root: Sequence[str] = []
+        self._root_id: str | None = None
+        self._root_id_base_path: str | None = None
+        self.ids: defaultdict[str, dict[str, str]] = defaultdict(dict)
+        self.after_load_files: set[str] = set()
+        self.exclude_names: set[str] = exclude_names or set()
+        self.duplicate_name_suffix: str | None = duplicate_name_suffix
+        self._base_url: str | None = base_url
+        self.singular_name_suffix: str = (
+            singular_name_suffix if isinstance(singular_name_suffix, str) else SINGULAR_NAME_SUFFIX
+        )
+        merged_field_name_resolver_classes = DEFAULT_FIELD_NAME_RESOLVERS.copy()
+        if field_name_resolver_classes:  # pragma: no cover
+            merged_field_name_resolver_classes.update(field_name_resolver_classes)
+        self.field_name_resolvers: dict[ModelType, FieldNameResolver] = {
+            k: v(
+                aliases=aliases,
+                snake_case_field=snake_case_field,
+                empty_field_name=empty_field_name,
+                original_delimiter=original_field_name_delimiter,
+                special_field_name_prefix=special_field_name_prefix,
+                remove_special_field_name_prefix=remove_special_field_name_prefix,
+                capitalise_enum_members=capitalise_enum_members if k == ModelType.ENUM else False,
+                no_alias=no_alias,
+            )
+            for k, v in merged_field_name_resolver_classes.items()
+        }
+        self.class_name_generator = custom_class_name_generator or self.default_class_name_generator
+        self._base_path: Path = base_path or Path.cwd()
+        self._current_base_path: Path | None = self._base_path
+        self.remove_suffix_number: bool = remove_suffix_number
+        self.parent_scoped_naming = parent_scoped_naming
+
    @property
    def current_base_path(self) -> Path | None:
        """Directory of the document currently being parsed (used for
        resolving relative file references)."""
        return self._current_base_path

    def set_current_base_path(self, base_path: Path | None) -> None:
        self._current_base_path = base_path

    @property
    def base_url(self) -> str | None:
        """Base URL against which remote references are joined, if set."""
        return self._base_url

    def set_base_url(self, base_url: str | None) -> None:
        self._base_url = base_url

    @contextmanager
    def current_base_path_context(self, base_path: Path | None) -> Generator[None, None, None]:
        """Temporarily switch current_base_path (resolved against the root
        base path), restoring the previous value on exit."""
        if base_path:
            base_path = (self._base_path / base_path).resolve()
        with context_variable(self.set_current_base_path, self.current_base_path, base_path):
            yield

    @contextmanager
    def base_url_context(self, base_url: str) -> Generator[None, None, None]:
        """Temporarily switch base_url — but only when a base URL is already
        configured; otherwise the context is a no-op."""
        if self._base_url:
            with context_variable(self.set_base_url, self.base_url, base_url):
                yield
        else:
            yield
+
+    @property
+    def current_root(self) -> Sequence[str]:
+        if len(self._current_root) > 1:
+            return self._current_root
+        return self._current_root
+
    def set_current_root(self, current_root: Sequence[str]) -> None:
        # Record the path parts of the document being parsed.
        self._current_root = current_root

    @contextmanager
    def current_root_context(self, current_root: Sequence[str]) -> Generator[None, None, None]:
        """Temporarily switch the current document root, restoring it on exit."""
        with context_variable(self.set_current_root, self.current_root, current_root):
            yield
+
    @property
    def root_id(self) -> str | None:
        """The "$id" declared by the root schema, when present."""
        return self._root_id

    @property
    def root_id_base_path(self) -> str | None:
        """Everything before the last "/" of root_id; used by resolve_ref to
        prefix id-relative references."""
        return self._root_id_base_path

    def set_root_id(self, root_id: str | None) -> None:
        # Cache the id's base path alongside the id itself.
        if root_id and "/" in root_id:
            self._root_id_base_path = root_id.rsplit("/", 1)[0]
        else:
            self._root_id_base_path = None

        self._root_id = root_id
+
    def add_id(self, id_: str, path: Sequence[str]) -> None:
        """Register a schema "$id" anchor, mapping it to its resolved location
        within the current document root."""
        self.ids["/".join(self.current_root)][id_] = self.resolve_ref(path)
+
+    def resolve_ref(self, path: Sequence[str] | str) -> str:  # noqa: PLR0911, PLR0912
+        joined_path = path if isinstance(path, str) else self.join_path(path)
+        if joined_path == "#":
+            return f"{'/'.join(self.current_root)}#"
+        if self.current_base_path and not self.base_url and joined_path[0] != "#" and not is_url(joined_path):
+            # resolve local file path
+            file_path, *object_part = joined_path.split("#", 1)
+            resolved_file_path = Path(self.current_base_path, file_path).resolve()
+            joined_path = get_relative_path(self._base_path, resolved_file_path).as_posix()
+            if object_part:
+                joined_path += f"#{object_part[0]}"
+        if ID_PATTERN.match(joined_path):
+            ref: str = self.ids["/".join(self.current_root)][joined_path]
+        else:
+            if "#" not in joined_path:
+                joined_path += "#"
+            elif joined_path[0] == "#":
+                joined_path = f"{'/'.join(self.current_root)}{joined_path}"
+
+            delimiter = joined_path.index("#")
+            file_path = "".join(joined_path[:delimiter])
+            ref = f"{''.join(joined_path[:delimiter])}#{''.join(joined_path[delimiter + 1 :])}"
+            if self.root_id_base_path and not (is_url(joined_path) or Path(self._base_path, file_path).is_file()):
+                ref = f"{self.root_id_base_path}/{ref}"
+
+        if self.base_url:
+            from .http import join_url  # noqa: PLC0415
+
+            joined_url = join_url(self.base_url, ref)
+            if "#" in joined_url:
+                return joined_url
+            return f"{joined_url}#"
+
+        if is_url(ref):
+            file_part, path_part = ref.split("#", 1)
+            if file_part == self.root_id:
+                return f"{'/'.join(self.current_root)}#{path_part}"
+            target_url: ParseResult = urlparse(file_part)
+            if not (self.root_id and self.current_base_path):
+                return ref
+            root_id_url: ParseResult = urlparse(self.root_id)
+            if (target_url.scheme, target_url.netloc) == (
+                root_id_url.scheme,
+                root_id_url.netloc,
+            ):  # pragma: no cover
+                target_url_path = Path(target_url.path)
+                relative_target_base = get_relative_path(Path(root_id_url.path).parent, target_url_path.parent)
+                target_path = self.current_base_path / relative_target_base / target_url_path.name
+                if target_path.exists():
+                    return f"{target_path.resolve().relative_to(self._base_path)}#{path_part}"
+
+        return ref
+
+    def is_after_load(self, ref: str) -> bool:
+        if is_url(ref) or not self.current_base_path:
+            return False
+        file_part, *_ = ref.split("#", 1)
+        absolute_path = Path(self._base_path, file_part).resolve().as_posix()
+        if self.is_external_root_ref(ref) or self.is_external_ref(ref):
+            return absolute_path in self.after_load_files
+        return False  # pragma: no cover
+
+    @staticmethod
+    def is_external_ref(ref: str) -> bool:
+        return "#" in ref and ref[0] != "#"
+
+    @staticmethod
+    def is_external_root_ref(ref: str) -> bool:
+        return ref[-1] == "#"
+
+    @staticmethod
+    def join_path(path: Sequence[str]) -> str:
+        joined_path = "/".join(p for p in path if p).replace("/#", "#")
+        if "#" not in joined_path:
+            joined_path += "#"
+        return joined_path
+
    def add_ref(self, ref: str, resolved: bool = False) -> Reference:  # noqa: FBT001, FBT002
        """Return the Reference for *ref*, creating an unloaded placeholder
        when none is registered yet.

        Args:
            ref: The "$ref" string (or an already-resolved path when
                *resolved* is True).
            resolved: Skip resolve_ref when the caller already resolved it.
        """
        path = self.resolve_ref(ref) if not resolved else ref
        reference = self.references.get(path)
        if reference:
            return reference
        # Derive a display name from the last path segment; for root refs
        # ("...#") the file stem is used instead of the pointer tail.
        split_ref = ref.rsplit("/", 1)
        if len(split_ref) == 1:
            original_name = Path(split_ref[0].rstrip("#") if self.is_external_root_ref(path) else split_ref[0]).stem
        else:
            original_name = Path(split_ref[1].rstrip("#")).stem if self.is_external_root_ref(path) else split_ref[1]
        name = self.get_class_name(original_name, unique=False).name
        # loaded=False marks this as a placeholder until the target is parsed.
        reference = Reference(
            path=path,
            original_name=original_name,
            name=name,
            loaded=False,
        )

        self.references[path] = reference
        return reference
+
+    def _check_parent_scope_option(self, name: str, path: Sequence[str]) -> str:
+        if self.parent_scoped_naming:
+            parent_reference = None
+            parent_path = path[:-1]
+            while parent_path:
+                parent_reference = self.references.get(self.join_path(parent_path))
+                if parent_reference is not None:
+                    break
+                parent_path = parent_path[:-1]
+            if parent_reference:
+                name = f"{parent_reference.name}_{name}"
+        return name
+
    def add(  # noqa: PLR0913
        self,
        path: Sequence[str],
        original_name: str,
        *,
        class_name: bool = False,
        singular_name: bool = False,
        unique: bool = True,
        singular_name_suffix: str | None = None,
        loaded: bool = False,
    ) -> Reference:
        """Register (or update) the Reference at *path* and return it.

        When a reference already exists it is reused unless a new, different
        *original_name* is supplied, in which case its names are regenerated.
        """
        joined_path = self.join_path(path)
        reference: Reference | None = self.references.get(joined_path)
        if reference:
            if loaded and not reference.loaded:
                reference.loaded = True
            # Reuse the existing entry when no new name information arrived.
            if not original_name or original_name in {reference.original_name, reference.name}:
                return reference
        name = original_name
        duplicate_name: str | None = None
        if class_name:
            name = self._check_parent_scope_option(name, path)
            name, duplicate_name = self.get_class_name(
                name=name,
                unique=unique,
                reserved_name=reference.name if reference else None,
                singular_name=singular_name,
                singular_name_suffix=singular_name_suffix,
            )
        else:
            # TODO: create a validate for module name
            name = self.get_valid_field_name(name, model_type=ModelType.CLASS)
            if singular_name:  # pragma: no cover
                name = get_singular_name(name, singular_name_suffix or self.singular_name_suffix)
            elif unique:  # pragma: no cover
                # NOTE(review): duplicate_name is recorded when the unique
                # name is UNCHANGED; that looks inverted (expected when it
                # differs) — confirm against get_class_name's behaviour.
                unique_name = self._get_unique_name(name)
                if unique_name == name:
                    duplicate_name = name
                name = unique_name
        if reference:
            # Update the existing entry in place so other holders see changes.
            reference.original_name = original_name
            reference.name = name
            reference.loaded = loaded
            reference.duplicate_name = duplicate_name
        else:
            reference = Reference(
                path=joined_path,
                original_name=original_name,
                name=name,
                loaded=loaded,
                duplicate_name=duplicate_name,
            )
            self.references[joined_path] = reference
        return reference
+
+    def get(self, path: Sequence[str] | str) -> Reference | None:
+        return self.references.get(self.resolve_ref(path))
+
+    def delete(self, path: Sequence[str] | str) -> None:
+        if self.resolve_ref(path) in self.references:
+            del self.references[self.resolve_ref(path)]
+
+    def default_class_name_generator(self, name: str) -> str:
+        # TODO: create a validate for class name
+        return self.field_name_resolvers[ModelType.CLASS].get_valid_name(
+            name, ignore_snake_case_field=True, upper_camel=True
+        )
+
    def get_class_name(
        self,
        name: str,
        unique: bool = True,  # noqa: FBT001, FBT002
        reserved_name: str | None = None,
        singular_name: bool = False,  # noqa: FBT001, FBT002
        singular_name_suffix: str | None = None,
    ) -> ClassName:
        """Generate a valid (optionally singular and unique) class name.

        A dotted *name* is treated as ``module.path.ClassName``: only the last
        segment becomes the class name; the leading segments are sanitized and
        re-joined as a dotted prefix.  When *unique* is requested and the
        generated name equals *reserved_name*, that name is returned without
        further uniquing.
        """
        if "." in name:
            split_name = name.split(".")
            prefix = ".".join(
                # TODO: create a validate for class name
                self.field_name_resolvers[ModelType.CLASS].get_valid_name(n, ignore_snake_case_field=True)
                for n in split_name[:-1]
            )
            prefix += "."
            class_name = split_name[-1]
        else:
            prefix = ""
            class_name = name

        class_name = self.class_name_generator(class_name)

        if singular_name:
            class_name = get_singular_name(class_name, singular_name_suffix or self.singular_name_suffix)
        duplicate_name: str | None = None
        if unique:
            if reserved_name == class_name:
                # NOTE(review): this early return omits `prefix`, unlike the
                # final return below -- confirm dotted names never reach here
                # with a reserved-name match.
                return ClassName(name=class_name, duplicate_name=duplicate_name)

            unique_name = self._get_unique_name(class_name, camel=True)
            if unique_name != class_name:
                # A rename happened: remember the colliding original name.
                duplicate_name = class_name
            class_name = unique_name
        return ClassName(name=f"{prefix}{class_name}", duplicate_name=duplicate_name)
+
    def _get_unique_name(self, name: str, camel: bool = False) -> str:  # noqa: FBT001, FBT002
        """Return *name*, suffixed with a counter if it collides with a known name.

        Collisions are checked against every registered reference name plus
        `exclude_names`.  With `remove_suffix_number` the counter starts at 0,
        so the first loop iteration re-yields the bare name before numbering
        begins.  Falsy parts (e.g. a zero counter) are dropped when joining.
        """
        unique_name: str = name
        count: int = 0 if self.remove_suffix_number else 1
        reference_names = {r.name for r in self.references.values()} | self.exclude_names
        while unique_name in reference_names:
            if self.duplicate_name_suffix:
                # Candidate shape: <name><sep><suffix><sep><count-1>; the
                # zero-valued first counter is filtered out by `if p` below.
                name_parts: list[str | int] = [
                    name,
                    self.duplicate_name_suffix,
                    count - 1,
                ]
            else:
                name_parts = [name, count]
            delimiter = "" if camel else "_"
            unique_name = delimiter.join(str(p) for p in name_parts if p) if count else name
            count += 1
        return unique_name
+
+    @classmethod
+    def validate_name(cls, name: str) -> bool:
+        return name.isidentifier() and not iskeyword(name)
+
+    def get_valid_field_name(
+        self,
+        name: str,
+        excludes: set[str] | None = None,
+        model_type: ModelType = ModelType.PYDANTIC,
+    ) -> str:
+        return self.field_name_resolvers[model_type].get_valid_name(name, excludes)
+
+    def get_valid_field_name_and_alias(
+        self,
+        field_name: str,
+        excludes: set[str] | None = None,
+        model_type: ModelType = ModelType.PYDANTIC,
+    ) -> tuple[str, str | None]:
+        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(field_name, excludes)
+
+
@lru_cache
def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
    """Return the singular form of *name*, or ``name + suffix`` when inflect cannot singularize."""
    singular = inflect_engine.singular_noun(name)
    if singular is False:
        return f"{name}{suffix}"
    return singular  # pyright: ignore[reportReturnType]
+
+
@lru_cache
def snake_to_upper_camel(word: str, delimiter: str = "_") -> str:
    """Convert a delimiter-separated *word* to UpperCamelCase.

    A word starting with *delimiter* keeps a single leading underscore
    (regardless of which delimiter was used); empty segments are skipped.
    """
    leading = ""
    if word.startswith(delimiter):
        leading = "_"
        word = word[1:]
    segments = [part for part in word.split(delimiter) if part]
    camel = "".join(part[0].upper() + part[1:] for part in segments)
    return leading + camel
+
+
def is_url(ref: str) -> bool:
    """Return True when *ref* is an absolute http or https URL."""
    return ref.startswith("http://") or ref.startswith("https://")
+
+
+inflect_engine = inflect.engine()
diff -pruN 0.26.4-3/src/datamodel_code_generator/types.py 0.34.0-1/src/datamodel_code_generator/types.py
--- 0.26.4-3/src/datamodel_code_generator/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,644 @@
+from __future__ import annotations
+
+import re
+from abc import ABC, abstractmethod
+from enum import Enum, auto
+from functools import lru_cache
+from itertools import chain
+from re import Pattern
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Optional,
+    Protocol,
+    TypeVar,
+    Union,
+    runtime_checkable,
+)
+
+import pydantic
+from packaging import version
+from pydantic import StrictBool, StrictInt, StrictStr, create_model
+
+from datamodel_code_generator.format import (
+    DatetimeClassType,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.imports import (
+    IMPORT_ABC_MAPPING,
+    IMPORT_ABC_SEQUENCE,
+    IMPORT_ABC_SET,
+    IMPORT_DICT,
+    IMPORT_FROZEN_SET,
+    IMPORT_LIST,
+    IMPORT_LITERAL,
+    IMPORT_MAPPING,
+    IMPORT_OPTIONAL,
+    IMPORT_SEQUENCE,
+    IMPORT_SET,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.reference import Reference, _BaseModel
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict
+
+if TYPE_CHECKING:
+    import builtins
+    from collections.abc import Iterable, Iterator, Sequence
+
+if PYDANTIC_V2:
+    from pydantic import GetCoreSchemaHandler
+    from pydantic_core import core_schema
+
+T = TypeVar("T")
+
+OPTIONAL = "Optional"
+OPTIONAL_PREFIX = f"{OPTIONAL}["
+
+UNION = "Union"
+UNION_PREFIX = f"{UNION}["
+UNION_DELIMITER = ", "
+UNION_PATTERN: Pattern[str] = re.compile(r"\s*,\s*")
+UNION_OPERATOR_DELIMITER = " | "
+UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r"\s*\|\s*")
+NONE = "None"
+ANY = "Any"
+LITERAL = "Literal"
+SEQUENCE = "Sequence"
+FROZEN_SET = "FrozenSet"
+MAPPING = "Mapping"
+DICT = "Dict"
+SET = "Set"
+LIST = "List"
+STANDARD_DICT = "dict"
+STANDARD_LIST = "list"
+STANDARD_SET = "set"
+STR = "str"
+
+NOT_REQUIRED = "NotRequired"
+NOT_REQUIRED_PREFIX = f"{NOT_REQUIRED}["
+
+
class StrictTypes(Enum):
    """Primitive type names that may be requested as strict types."""

    str = "str"
    bytes = "bytes"
    int = "int"
    float = "float"
    bool = "bool"
+
+
class UnionIntFloat:
    """Wrapper around a numeric value accepting either int or float.

    Provides validation hooks for both Pydantic v1 (`__get_validators__`) and
    v2 (`__get_pydantic_core_schema__`); the wrapped value is exposed through
    `__int__`, `__float__` and `__str__`.
    """

    def __init__(self, value: float) -> None:
        self.value: int | float = value

    def __int__(self) -> int:
        return int(self.value)

    def __float__(self) -> float:
        return float(self.value)

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:  # noqa: PLW3201
        # Pydantic v1 validation hook.
        yield cls.validate

    @classmethod
    def __get_pydantic_core_schema__(  # noqa: PLW3201
        cls, _source_type: Any, _handler: GetCoreSchemaHandler
    ) -> core_schema.CoreSchema:
        # Pydantic v2 hook: accept int/float (JSON) or an existing instance
        # (Python), serializing back to the raw wrapped value.
        from_int_schema = core_schema.chain_schema(  # pyright: ignore[reportPossiblyUnboundVariable]
            [
                core_schema.union_schema(  # pyright: ignore[reportPossiblyUnboundVariable]
                    [core_schema.int_schema(), core_schema.float_schema()]  # pyright: ignore[reportPossiblyUnboundVariable]
                ),
                core_schema.no_info_plain_validator_function(cls.validate),  # pyright: ignore[reportPossiblyUnboundVariable]
            ]
        )

        return core_schema.json_or_python_schema(  # pyright: ignore[reportPossiblyUnboundVariable]
            json_schema=from_int_schema,
            python_schema=core_schema.union_schema(  # pyright: ignore[reportPossiblyUnboundVariable]
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(UnionIntFloat),  # pyright: ignore[reportPossiblyUnboundVariable]
                    from_int_schema,
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(  # pyright: ignore[reportPossiblyUnboundVariable]
                lambda instance: instance.value
            ),
        )

    @classmethod
    def validate(cls, v: Any) -> UnionIntFloat:
        """Coerce *v* into a UnionIntFloat; raise TypeError for non-numeric input."""
        if isinstance(v, UnionIntFloat):
            return v
        if not isinstance(v, (int, float)):  # pragma: no cover
            try:
                int(v)
                # NOTE(review): the original (unconverted) value is stored, so
                # e.g. a numeric string stays a string -- confirm intent.
                return cls(v)
            except (TypeError, ValueError):
                pass
            try:
                float(v)
                return cls(v)
            except (TypeError, ValueError):
                pass

            msg = f"{v} is not int or float"
            raise TypeError(msg)
        return cls(v)
+
+
def chain_as_tuple(*iterables: Iterable[T]) -> tuple[T, ...]:
    """Concatenate *iterables* in order and return the result as a tuple."""
    return tuple(chain.from_iterable(iterables))
+
+
@lru_cache
def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: str) -> list[str]:
    """Split a type string on *split_pattern*, dropping fragments equal to `None`.

    Bracket depth is tracked so that delimiters inside ``[...]`` do not split a
    nested generic: fragments are accumulated and re-joined with *delimiter*
    until the depth returns to zero, then emitted as one member.
    """
    types: list[str] = []
    split_type: str = ""
    inner_count: int = 0
    for part in re.split(split_pattern, type_):
        if part == NONE:
            continue
        # Track how deep inside `[` ... `]` we are after this fragment.
        inner_count += part.count("[") - part.count("]")
        if split_type:
            split_type += delimiter
        if inner_count == 0:
            if split_type:
                types.append(f"{split_type}{part}")
            else:
                types.append(part)
            split_type = ""
            continue
        # Still inside brackets: keep accumulating the current member.
        split_type += part
    return types
+
+
def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0912
    """Strip `None` members from a union type string.

    Handles both ``Union[...]`` and PEP 604 (``X | Y``) spellings, keeping
    nested brackets and ``constr(...)`` expressions (which may contain commas
    or brackets of their own) intact.  Returns the input unchanged when it is
    not a union; returns ``None`` (the string) when every member was None.
    """
    if use_union_operator:
        if " | " not in type_:
            return type_
        separator = "|"
        inner_text = type_
    else:
        if not type_.startswith(UNION_PREFIX):
            return type_
        separator = ","
        inner_text = type_[len(UNION_PREFIX) : -1]

    parts = []
    inner_count = 0
    current_part = ""

    # With this variable we count any non-escaped round bracket, whenever we are inside a
    # constraint string expression. Once found a part starting with `constr(`, we increment
    # this counter for each non-escaped opening round bracket and decrement it for each
    # non-escaped closing round bracket.
    in_constr = 0

    # Parse union parts carefully to handle nested structures
    for char in inner_text:
        current_part += char
        if char == "[" and in_constr == 0:
            inner_count += 1
        elif char == "]" and in_constr == 0:
            inner_count -= 1
        elif char == "(":
            if current_part.strip().startswith("constr(") and current_part[-2] != "\\":
                # non-escaped opening round bracket found inside constraint string expression
                in_constr += 1
        elif char == ")":
            if in_constr > 0 and current_part[-2] != "\\":
                # non-escaped closing round bracket found inside constraint string expression
                in_constr -= 1
        elif char == separator and inner_count == 0 and in_constr == 0:
            part = current_part[:-1].strip()
            if part != NONE:
                # Process nested unions recursively
                # only UNION_PREFIX might be nested but not union_operator
                if not use_union_operator and part.startswith(UNION_PREFIX):
                    part = _remove_none_from_union(part, use_union_operator=False)
                parts.append(part)
            current_part = ""

    # Flush the trailing member (there is no separator after the last one).
    part = current_part.strip()
    if current_part and part != NONE:
        # only UNION_PREFIX might be nested but not union_operator
        if not use_union_operator and part.startswith(UNION_PREFIX):
            part = _remove_none_from_union(part, use_union_operator=False)
        parts.append(part)

    if not parts:
        return NONE
    if len(parts) == 1:
        return parts[0]

    if use_union_operator:
        return UNION_OPERATOR_DELIMITER.join(parts)

    return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"
+
+
@lru_cache
def get_optional_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
    """Wrap *type_* as optional (``Optional[...]`` or ``... | None``).

    Existing None members are stripped first so None is never doubled up.
    """
    stripped = _remove_none_from_union(type_, use_union_operator=use_union_operator)
    if not stripped or stripped == NONE:
        return NONE
    if use_union_operator:
        return f"{stripped} | {NONE}"
    return f"{OPTIONAL_PREFIX}{stripped}]"
+
+
@runtime_checkable
class Modular(Protocol):
    """Structural type for objects that expose the module they belong to."""

    @property
    def module_name(self) -> str:
        raise NotImplementedError
+
+
@runtime_checkable
class Nullable(Protocol):
    """Structural type for objects that expose a `nullable` flag."""

    @property
    def nullable(self) -> bool:
        raise NotImplementedError
+
+
class DataType(_BaseModel):
    """A node in the generated type graph.

    Represents one Python type expression -- possibly nested via `data_types`
    and `dict_key` -- together with the imports it needs and the `Reference`
    it points at.  `type_hint` renders the node to source text.
    """

    if PYDANTIC_V2:
        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
            extra="forbid",
            revalidate_instances="never",
        )
    else:
        if not TYPE_CHECKING:

            @classmethod
            def model_rebuild(cls) -> None:
                # Pydantic v1 shim: expose the v2 `model_rebuild` API on top of
                # v1's `update_forward_refs`.
                cls.update_forward_refs()

        class Config:
            extra = "forbid"
            copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"

    type: Optional[str] = None  # noqa: UP045
    reference: Optional[Reference] = None  # noqa: UP045
    data_types: list[DataType] = []  # noqa: RUF012
    is_func: bool = False
    kwargs: Optional[dict[str, Any]] = None  # noqa: UP045
    import_: Optional[Import] = None  # noqa: UP045
    python_version: PythonVersion = PythonVersionMin
    is_optional: bool = False
    is_dict: bool = False
    is_list: bool = False
    is_set: bool = False
    is_custom_type: bool = False
    literals: list[Union[StrictBool, StrictInt, StrictStr]] = []  # noqa: RUF012, UP007
    use_standard_collections: bool = False
    use_generic_container: bool = False
    use_union_operator: bool = False
    alias: Optional[str] = None  # noqa: UP045
    parent: Optional[Any] = None  # noqa: UP045
    children: list[Any] = []  # noqa: RUF012
    strict: bool = False
    dict_key: Optional[DataType] = None  # noqa: UP045
    treat_dot_as_module: bool = False

    _exclude_fields: ClassVar[set[str]] = {"parent", "children"}
    _pass_fields: ClassVar[set[str]] = {"parent", "children", "data_types", "reference"}

    @classmethod
    def from_import(  # noqa: PLR0913
        cls: builtins.type[DataTypeT],
        import_: Import,
        *,
        is_optional: bool = False,
        is_dict: bool = False,
        is_list: bool = False,
        is_set: bool = False,
        is_custom_type: bool = False,
        strict: bool = False,
        kwargs: dict[str, Any] | None = None,
    ) -> DataTypeT:
        """Build an instance whose `type` is the imported name itself."""
        return cls(
            type=import_.import_,
            import_=import_,
            is_optional=is_optional,
            is_dict=is_dict,
            is_list=is_list,
            is_set=is_set,
            is_func=bool(kwargs),
            is_custom_type=is_custom_type,
            strict=strict,
            kwargs=kwargs,
        )

    @property
    def unresolved_types(self) -> frozenset[str]:
        """Paths of every reference reachable from this node (children and self)."""
        return frozenset(
            {t.reference.path for data_types in self.data_types for t in data_types.all_data_types if t.reference}
            | ({self.reference.path} if self.reference else set())
        )

    def replace_reference(self, reference: Reference | None) -> None:
        """Re-point this node at *reference*, detaching it from the old one's children."""
        if not self.reference:  # pragma: no cover
            msg = f"`{self.__class__.__name__}.replace_reference()` can't be called when `reference` field is empty."
            raise Exception(msg)  # noqa: TRY002
        self_id = id(self)
        # Remove exactly this object (by identity) from the old reference.
        self.reference.children = [c for c in self.reference.children if id(c) != self_id]
        self.reference = reference
        if reference:
            reference.children.append(self)

    def remove_reference(self) -> None:
        """Detach this node from its current reference."""
        self.replace_reference(None)

    @property
    def module_name(self) -> str | None:
        """Module name of the referenced source, when it exposes one."""
        if self.reference and isinstance(self.reference.source, Modular):
            return self.reference.source.module_name
        return None  # pragma: no cover

    @property
    def full_name(self) -> str:
        """`module.ShortName` when a module is known, otherwise just the short name."""
        module_name = self.module_name
        if module_name:
            return f"{module_name}.{self.reference.short_name if self.reference else ''}"
        return self.reference.short_name if self.reference else ""

    @property
    def all_data_types(self) -> Iterator[DataType]:
        """Depth-first iteration over nested data types, ending with self."""
        for data_type in self.data_types:
            yield from data_type.all_data_types
        yield self

    @property
    def all_imports(self) -> Iterator[Import]:
        """Imports required by this node and all of its nested data types."""
        for data_type in self.data_types:
            yield from data_type.all_imports
        yield from self.imports

    @property
    def imports(self) -> Iterator[Import]:
        """Imports required by this node itself (children are handled by `all_imports`)."""
        # Add base import if exists
        if self.import_:
            yield self.import_

        # Define required imports based on type features and conditions
        imports: tuple[tuple[bool, Import], ...] = (
            (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
            (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
            (bool(self.literals), IMPORT_LITERAL),
        )

        if self.use_generic_container:
            if self.use_standard_collections:
                imports = (
                    *imports,
                    (self.is_list, IMPORT_ABC_SEQUENCE),
                    (self.is_set, IMPORT_ABC_SET),
                    (self.is_dict, IMPORT_ABC_MAPPING),
                )
            else:
                imports = (
                    *imports,
                    (self.is_list, IMPORT_SEQUENCE),
                    (self.is_set, IMPORT_FROZEN_SET),
                    (self.is_dict, IMPORT_MAPPING),
                )
        elif not self.use_standard_collections:
            imports = (
                *imports,
                (self.is_list, IMPORT_LIST),
                (self.is_set, IMPORT_SET),
                (self.is_dict, IMPORT_DICT),
            )

        # Yield imports based on conditions
        for field, import_ in imports:
            if field and import_ != self.import_:
                yield import_

        # Propagate imports from any dict_key type
        if self.dict_key:
            yield from self.dict_key.imports

    def __init__(self, **values: Any) -> None:
        """Validate fields, collapse `Optional[Any]` union members, and wire parent/child links."""
        if not TYPE_CHECKING:
            super().__init__(**values)

        # If an optional-Any member coexists with concrete members, mark the
        # whole node optional and drop the optional-Any member.
        for type_ in self.data_types:
            if type_.type == ANY and type_.is_optional:
                if any(t for t in self.data_types if t.type != ANY):  # pragma: no cover
                    self.is_optional = True
                    self.data_types = [t for t in self.data_types if not (t.type == ANY and t.is_optional)]
                break  # pragma: no cover

        for data_type in self.data_types:
            if data_type.reference or data_type.data_types:
                data_type.parent = self

        if self.reference:
            self.reference.children.append(self)

    @property
    def type_hint(self) -> str:  # noqa: PLR0912, PLR0915
        """Render this node as Python source text.

        NOTE: rendering has side effects -- it may set `is_optional` while
        flattening union members that include `None`.
        """
        type_: str | None = self.alias or self.type
        if not type_:
            if self.is_union:
                data_types: list[str] = []
                for data_type in self.data_types:
                    data_type_type = data_type.type_hint
                    if data_type_type in data_types:  # pragma: no cover
                        continue

                    if data_type_type == NONE:
                        self.is_optional = True
                        continue

                    non_optional_data_type_type = _remove_none_from_union(
                        data_type_type, use_union_operator=self.use_union_operator
                    )

                    if non_optional_data_type_type != data_type_type:
                        self.is_optional = True

                    data_types.append(non_optional_data_type_type)
                if len(data_types) == 1:
                    type_ = data_types[0]
                elif self.use_union_operator:
                    type_ = UNION_OPERATOR_DELIMITER.join(data_types)
                else:
                    type_ = f"{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]"
            elif len(self.data_types) == 1:
                type_ = self.data_types[0].type_hint
            elif self.literals:
                type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
            elif self.reference:
                type_ = self.reference.short_name
            else:
                # TODO support strict Any
                type_ = ""
        if self.reference:
            source = self.reference.source
            if isinstance(source, Nullable) and source.nullable:
                self.is_optional = True
        if self.is_list:
            if self.use_generic_container:
                list_ = SEQUENCE
            elif self.use_standard_collections:
                list_ = STANDARD_LIST
            else:
                list_ = LIST
            type_ = f"{list_}[{type_}]" if type_ else list_
        elif self.is_set:
            if self.use_generic_container:
                set_ = FROZEN_SET
            elif self.use_standard_collections:
                set_ = STANDARD_SET
            else:
                set_ = SET
            type_ = f"{set_}[{type_}]" if type_ else set_
        elif self.is_dict:
            if self.use_generic_container:
                dict_ = MAPPING
            elif self.use_standard_collections:
                dict_ = STANDARD_DICT
            else:
                dict_ = DICT
            if self.dict_key or type_:
                key = self.dict_key.type_hint if self.dict_key else STR
                type_ = f"{dict_}[{key}, {type_ or ANY}]"
            else:  # pragma: no cover
                type_ = dict_
        if self.is_optional and type_ != ANY:
            return get_optional_type(type_, self.use_union_operator)
        if self.is_func:
            if self.kwargs:
                kwargs: str = ", ".join(f"{k}={v}" for k, v in self.kwargs.items())
                return f"{type_}({kwargs})"
            return f"{type_}()"
        return type_

    @property
    def is_union(self) -> bool:
        """True when this node has more than one member type."""
        return len(self.data_types) > 1
+
+
# Resolve self-referential forward references (`data_types`, `dict_key`).
DataType.model_rebuild()

# Type variable bound to DataType for factory methods returning the caller's subclass.
DataTypeT = TypeVar("DataTypeT", bound=DataType)
+
+
class EmptyDataType(DataType):
    # Marker subclass: identical behavior to DataType, distinguishable via
    # isinstance checks.  NOTE(review): semantics inferred from the name --
    # confirm at usage sites.
    pass
+
+
class Types(Enum):
    """Canonical scalar/structure kinds that data-type managers translate into Python types."""

    integer = auto()
    int32 = auto()
    int64 = auto()
    number = auto()
    float = auto()
    double = auto()
    decimal = auto()
    time = auto()
    string = auto()
    byte = auto()
    binary = auto()
    date = auto()
    date_time = auto()
    timedelta = auto()
    password = auto()
    path = auto()
    email = auto()
    uuid = auto()
    uuid1 = auto()
    uuid2 = auto()
    uuid3 = auto()
    uuid4 = auto()
    uuid5 = auto()
    uri = auto()
    hostname = auto()
    ipv4 = auto()
    ipv4_network = auto()
    ipv6 = auto()
    ipv6_network = auto()
    boolean = auto()
    object = auto()
    null = auto()
    array = auto()
    any = auto()
+
+
class DataTypeManager(ABC):
    """Abstract factory mapping schema `Types` to concrete `DataType` instances."""

    def __init__(  # noqa: PLR0913, PLR0917
        self,
        python_version: PythonVersion = PythonVersionMin,
        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
        strict_types: Sequence[StrictTypes] | None = None,
        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
        use_union_operator: bool = False,  # noqa: FBT001, FBT002
        use_pendulum: bool = False,  # noqa: FBT001, FBT002
        target_datetime_class: DatetimeClassType | None = None,
        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
        """Store generation options and build a context-bound `DataType` subclass."""
        self.python_version = python_version
        self.use_standard_collections: bool = use_standard_collections
        self.use_generic_container_types: bool = use_generic_container_types
        self.strict_types: Sequence[StrictTypes] = strict_types or ()
        self.use_non_positive_negative_number_constrained_types: bool = (
            use_non_positive_negative_number_constrained_types
        )
        self.use_union_operator: bool = use_union_operator
        self.use_pendulum: bool = use_pendulum
        self.target_datetime_class: DatetimeClassType | None = target_datetime_class
        self.treat_dot_as_module: bool = treat_dot_as_module

        if TYPE_CHECKING:
            self.data_type: type[DataType]
        else:
            # Bake the context options into a DataType subclass so every
            # instance created through `self.data_type` carries them as defaults.
            self.data_type: type[DataType] = create_model(
                "ContextDataType",
                python_version=(PythonVersion, python_version),
                use_standard_collections=(bool, use_standard_collections),
                use_generic_container=(bool, use_generic_container_types),
                use_union_operator=(bool, use_union_operator),
                treat_dot_as_module=(bool, treat_dot_as_module),
                __base__=DataType,
            )

    @abstractmethod
    def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
        """Translate a schema type kind into a concrete DataType (subclass-specific)."""
        raise NotImplementedError

    def get_data_type_from_full_path(self, full_path: str, is_custom_type: bool) -> DataType:  # noqa: FBT001
        """Build a DataType from a dotted import path like `module.Name`."""
        return self.data_type.from_import(Import.from_full_path(full_path), is_custom_type=is_custom_type)

    def get_data_type_from_value(self, value: Any) -> DataType:
        """Infer a DataType from a concrete Python value; unknown values map to `any`."""
        type_: Types | None = None
        if isinstance(value, str):
            type_ = Types.string
        # bool is tested before int because bool is a subclass of int.
        elif isinstance(value, bool):
            type_ = Types.boolean
        elif isinstance(value, int):
            type_ = Types.integer
        elif isinstance(value, float):
            type_ = Types.float
        elif isinstance(value, dict):
            return self.data_type.from_import(IMPORT_DICT)
        elif isinstance(value, list):
            return self.data_type.from_import(IMPORT_LIST)
        else:
            type_ = Types.any
        return self.get_data_type(type_)
diff -pruN 0.26.4-3/src/datamodel_code_generator/util.py 0.34.0-1/src/datamodel_code_generator/util.py
--- 0.26.4-3/src/datamodel_code_generator/util.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/src/datamodel_code_generator/util.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+import copy
+from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, overload
+
+import pydantic
+from packaging import version
+from pydantic import BaseModel as _BaseModel
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+PYDANTIC_VERSION = version.parse(pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION))
+
+PYDANTIC_V2: bool = version.parse("2.0b3") <= PYDANTIC_VERSION
+
+try:
+    from yaml import CSafeLoader as SafeLoader
+except ImportError:  # pragma: no cover
+    from yaml import SafeLoader
+
+try:
+    from tomllib import load as load_tomllib  # type: ignore[ignoreMissingImports]
+except ImportError:
+    from tomli import load as load_tomllib  # type: ignore[ignoreMissingImports]
+
+
+def load_toml(path: Path) -> dict[str, Any]:
+    with path.open("rb") as f:
+        return load_tomllib(f)
+
+
# Make YAML timestamps load as plain strings by registering the `str`
# constructor for the timestamp tag.  The patch is applied to a deep copy so
# the process-wide PyYAML SafeLoader class is left untouched; the local
# `SafeLoader` name is then rebound to the patched copy.
SafeLoaderTemp = copy.deepcopy(SafeLoader)
SafeLoaderTemp.yaml_constructors = copy.deepcopy(SafeLoader.yaml_constructors)
SafeLoaderTemp.add_constructor(
    "tag:yaml.org,2002:timestamp",
    SafeLoaderTemp.yaml_constructors["tag:yaml.org,2002:str"],
)
SafeLoader = SafeLoaderTemp
+
+Model = TypeVar("Model", bound=_BaseModel)
+T = TypeVar("T")
+
+
@overload
def model_validator(
    mode: Literal["before"],
) -> (
    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
): ...


@overload
def model_validator(
    mode: Literal["after"],
) -> (
    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
): ...


@overload
def model_validator() -> (
    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
): ...


def model_validator(  # pyright: ignore[reportInconsistentOverload]
    mode: Literal["before", "after"] = "after",
) -> (
    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
):
    """
    Decorator for model validators in Pydantic models.

    Uses `model_validator` in Pydantic v2 and `root_validator` in Pydantic v1.

    We support only `before` mode because `after` mode needs different validator
    implementation for v1 and v2.
    """

    @overload
    def inner(method: Callable[[type[Model], T], T]) -> Callable[[type[Model], T], T]: ...

    @overload
    def inner(method: Callable[[Model, T], T]) -> Callable[[Model, T], T]: ...

    @overload
    def inner(method: Callable[[Model], Model]) -> Callable[[Model], Model]: ...

    def inner(
        method: Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model],
    ) -> Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model]:
        if PYDANTIC_V2:
            from pydantic import model_validator as model_validator_v2  # noqa: PLC0415

            # Bug fix: this previously tested `method == "before"`, comparing the
            # decorated callable against a string (always False), so "before"-mode
            # validators were never wrapped in `classmethod` as pydantic v2
            # expects.  The *mode* is the value to compare.
            if mode == "before":
                return model_validator_v2(mode=mode)(classmethod(method))  # type: ignore[reportReturnType]
            return model_validator_v2(mode=mode)(method)  # type: ignore[reportReturnType]
        from pydantic import root_validator  # noqa: PLC0415

        return root_validator(method, pre=mode == "before")  # pyright: ignore[reportCallIssue]

    return inner
+
+
def field_validator(
    field_name: str,
    *fields: str,
    mode: Literal["before", "after"] = "after",
) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]:
    """Version-agnostic field validator decorator.

    Dispatches to pydantic v2 `field_validator` or v1 `validator`, translating
    `mode="before"` into v1's `pre=True`.
    """

    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
        if PYDANTIC_V2:
            from pydantic import field_validator as field_validator_v2  # noqa: PLC0415

            return field_validator_v2(field_name, *fields, mode=mode)(method)
        from pydantic import validator  # noqa: PLC0415

        return validator(field_name, *fields, pre=mode == "before")(method)  # pyright: ignore[reportReturnType]

    return inner
+
+
if PYDANTIC_V2:
    from pydantic import ConfigDict
else:
    # Pydantic v1 has no ConfigDict; a plain dict stands in for the v2 API here.
    ConfigDict = dict
+
+
class BaseModel(_BaseModel):
    """Shared pydantic base model; under Pydantic v2 it opts out of strict validation."""

    if PYDANTIC_V2:
        model_config = ConfigDict(strict=False)  # pyright: ignore[reportAssignmentType]
diff -pruN 0.26.4-3/tests/conftest.py 0.34.0-1/tests/conftest.py
--- 0.26.4-3/tests/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/conftest.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator import MIN_VERSION
+
+
+@pytest.fixture(scope="session")
+def min_version() -> str:
+    return f"3.{MIN_VERSION}"
diff -pruN 0.26.4-3/tests/data/csv/simple.csv 0.34.0-1/tests/data/csv/simple.csv
--- 0.26.4-3/tests/data/csv/simple.csv	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/csv/simple.csv	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+id,name,tel,zip code
+1,taro,0123456789,98765
+2,ken,234567891,98764
+3,ichiro,345678912,98763
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/custom_file_header.txt 0.34.0-1/tests/data/custom_file_header.txt
--- 0.26.4-3/tests/data/custom_file_header.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/custom_file_header.txt	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# multiline custom ;
+# header ;
+# file ;
diff -pruN 0.26.4-3/tests/data/expected/main/csv/csv_file_simple.py 0.34.0-1/tests/data/expected/main/csv/csv_file_simple.py
--- 0.26.4-3/tests/data/expected/main/csv/csv_file_simple.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/csv/csv_file_simple.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  simple.csv
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    id: str
+    name: str
+    tel: str
+    zip_code: str = Field(..., alias='zip code')
diff -pruN 0.26.4-3/tests/data/expected/main/csv/csv_stdin_simple.py 0.34.0-1/tests/data/expected/main/csv/csv_stdin_simple.py
--- 0.26.4-3/tests/data/expected/main/csv/csv_stdin_simple.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/csv/csv_stdin_simple.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    id: str
+    name: str
+    tel: str
+    zip_code: str = Field(..., alias='zip code')
diff -pruN 0.26.4-3/tests/data/expected/main/direct_input_dict.py 0.34.0-1/tests/data/expected/main/direct_input_dict.py
--- 0.26.4-3/tests/data/expected/main/direct_input_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/direct_input_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  <dict>
+#   timestamp: 2024-12-14T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    baz: int
+
+
+class Model(BaseModel):
+    foo: int
+    bar: Bar
diff -pruN 0.26.4-3/tests/data/expected/main/frozen_dataclasses.py 0.34.0-1/tests/data/expected/main/frozen_dataclasses.py
--- 0.26.4-3/tests/data/expected/main/frozen_dataclasses.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/frozen_dataclasses.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  simple_frozen_test.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(frozen=True)
+class User:
+    name: str
+    age: int
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/frozen_dataclasses_keyword_only.py 0.34.0-1/tests/data/expected/main/frozen_dataclasses_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/frozen_dataclasses_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/frozen_dataclasses_keyword_only.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  simple_frozen_test.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(kw_only=True, frozen=True)
+class User:
+    name: str
+    age: int
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/additional_imports_isort4.py 0.34.0-1/tests/data/expected/main/graphql/additional_imports_isort4.py
--- 0.26.4-3/tests/data/expected/main/graphql/additional_imports_isort4.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/additional_imports_isort4.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  additional-imports.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+from mymodule.myclass import MyCustomPythonClass
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Date: TypeAlias = date
+
+
+DateTime: TypeAlias = datetime
+"""
+DateTime (ISO8601, example: 2020-01-01T10:11:12+00:00)
+"""
+
+
+MyCustomClass: TypeAlias = MyCustomPythonClass
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    a: Date
+    b: DateTime
+    c: MyCustomClass
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/additional_imports_isort5.py 0.34.0-1/tests/data/expected/main/graphql/additional_imports_isort5.py
--- 0.26.4-3/tests/data/expected/main/graphql/additional_imports_isort5.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/additional_imports_isort5.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  additional-imports.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import Literal, Optional, TypeAlias
+
+from mymodule.myclass import MyCustomPythonClass
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Date: TypeAlias = date
+
+
+DateTime: TypeAlias = datetime
+"""
+DateTime (ISO8601, example: 2020-01-01T10:11:12+00:00)
+"""
+
+
+MyCustomClass: TypeAlias = MyCustomPythonClass
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    a: Date
+    b: DateTime
+    c: MyCustomClass
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated.py 0.34.0-1/tests/data/expected/main/graphql/annotated.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[List[String]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    typename__: Annotated[Optional[Literal['A']], Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_field_aliases.py 0.34.0-1/tests/data/expected/main/graphql/annotated_field_aliases.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_field_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated_field_aliases.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  field-aliases.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+DateTime: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class DateTimePeriod(BaseModel):
+    periodFrom: Annotated[DateTime, Field(alias='from')]
+    periodTo: Annotated[DateTime, Field(alias='to')]
+    typename__: Annotated[
+        Optional[Literal['DateTimePeriod']], Field(alias='__typename')
+    ] = 'DateTimePeriod'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_default_kwarg.py 0.34.0-1/tests/data/expected/main/graphql/annotated_use_default_kwarg.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_default_kwarg.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated_use_default_kwarg.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[List[String]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    typename__: Optional[Literal['A']] = Field(default='A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections.py 0.34.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    listOptionalField: list[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[list[String]] = None
+    optionalListOptionalField: Optional[list[Optional[String]]] = None
+    typename__: Annotated[Optional[Literal['A']], Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py 0.34.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    listOptionalField: list[String | None]
+    optionalField: String | None = None
+    optionalListField: list[String] | None = None
+    optionalListOptionalField: list[String | None] | None = None
+    typename__: Annotated[Literal['A'] | None, Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_union_operator.py 0.34.0-1/tests/data/expected/main/graphql/annotated_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/annotated_use_union_operator.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Literal, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[String | None]
+    optionalField: String | None = None
+    optionalListField: List[String] | None = None
+    optionalListOptionalField: List[String | None] | None = None
+    typename__: Annotated[Literal['A'] | None, Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/custom_formatters.py 0.34.0-1/tests/data/expected/main/graphql/custom_formatters.py
--- 0.26.4-3/tests/data/expected/main/graphql/custom_formatters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/custom_formatters.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom-scalar-types.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# a comment
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Long: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    duration: Long
+    id: ID
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/custom_scalar_types.py 0.34.0-1/tests/data/expected/main/graphql/custom_scalar_types.py
--- 0.26.4-3/tests/data/expected/main/graphql/custom_scalar_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/custom_scalar_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  custom-scalar-types.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Long: TypeAlias = int
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    duration: Long
+    id: ID
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/different_types_of_fields.py 0.34.0-1/tests/data/expected/main/graphql/different_types_of_fields.py
--- 0.26.4-3/tests/data/expected/main/graphql/different_types_of_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/different_types_of_fields.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  different-types-of-fields.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listListOptionalField: List[List[Optional[String]]]
+    listOptionalField: List[Optional[String]]
+    listOptionalListField: List[Optional[List[String]]]
+    listOptionalListOptionalField: List[Optional[List[Optional[String]]]]
+    optionalField: Optional[String] = None
+    optionalListListField: Optional[List[List[String]]] = None
+    optionalListListOptionalField: Optional[List[List[Optional[String]]]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    optionalListOptionalListField: Optional[List[Optional[List[String]]]] = None
+    optionalListOptionalListOptionalField: Optional[
+        List[Optional[List[Optional[String]]]]
+    ] = None
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums.py 0.34.0-1/tests/data/expected/main/graphql/enums.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/enums.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(Enum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(Enum):
+    FIELD = 'FIELD'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/field_aliases.py 0.34.0-1/tests/data/expected/main/graphql/field_aliases.py
--- 0.26.4-3/tests/data/expected/main/graphql/field_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/field_aliases.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  field-aliases.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+DateTime: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class DateTimePeriod(BaseModel):
+    periodFrom: DateTime = Field(..., alias='from')
+    periodTo: DateTime = Field(..., alias='to')
+    typename__: Optional[Literal['DateTimePeriod']] = Field(
+        'DateTimePeriod', alias='__typename'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/github_api.py 0.34.0-1/tests/data/expected/main/graphql/github_api.py
--- 0.26.4-3/tests/data/expected/main/graphql/github_api.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/github_api.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23207 @@
+# generated by datamodel-codegen:
+#   filename:  github-api.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date, datetime
+from enum import Enum
+from typing import List, Literal, Optional, TypeAlias, Union
+
+from pydantic import BaseModel, Field
+
+Base64String: TypeAlias = str
+"""
+A (potentially binary) string encoded using base64.
+"""
+
+
+BigInt: TypeAlias = int
+"""
+Represents non-fractional signed whole numeric values. Since the value may
+exceed the size of a 32-bit integer, it's encoded as a string.
+"""
+
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Date: TypeAlias = date
+"""
+An ISO-8601 encoded date string.
+"""
+
+
+DateTime: TypeAlias = datetime
+"""
+An ISO-8601 encoded UTC date string.
+"""
+
+
+Float: TypeAlias = float
+"""
+The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](https://en.wikipedia.org/wiki/IEEE_floating_point).
+"""
+
+
+GitObjectID: TypeAlias = str
+"""
+A Git object ID.
+"""
+
+
+GitRefname: TypeAlias = str
+"""
+A fully qualified reference name (e.g. `refs/heads/master`).
+"""
+
+
+GitSSHRemote: TypeAlias = str
+"""
+Git SSH string
+"""
+
+
+GitTimestamp: TypeAlias = str
+"""
+An ISO-8601 encoded date string. Unlike the DateTime type, GitTimestamp is not converted in UTC.
+"""
+
+
+HTML: TypeAlias = str
+"""
+A string containing HTML code.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+PreciseDateTime: TypeAlias = datetime
+"""
+An ISO-8601 encoded UTC date string with millisecond precision.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+URI: TypeAlias = str
+"""
+An RFC 3986, RFC 3987, and RFC 6570 (level 4) compliant URI string.
+"""
+
+
+X509Certificate: TypeAlias = str
+"""
+A valid x509 certificate string
+"""
+
+
+class ActorType(Enum):
+    """
+    The actor's type.
+    """
+
+    TEAM = 'TEAM'
+    USER = 'USER'
+
+
+class AuditLogOrderField(Enum):
+    """
+    Properties by which Audit Log connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class CheckAnnotationLevel(Enum):
+    """
+    Represents an annotation's information level.
+    """
+
+    FAILURE = 'FAILURE'
+    NOTICE = 'NOTICE'
+    WARNING = 'WARNING'
+
+
+class CheckConclusionState(Enum):
+    """
+    The possible states for a check suite or run conclusion.
+    """
+
+    ACTION_REQUIRED = 'ACTION_REQUIRED'
+    CANCELLED = 'CANCELLED'
+    FAILURE = 'FAILURE'
+    NEUTRAL = 'NEUTRAL'
+    SKIPPED = 'SKIPPED'
+    STALE = 'STALE'
+    STARTUP_FAILURE = 'STARTUP_FAILURE'
+    SUCCESS = 'SUCCESS'
+    TIMED_OUT = 'TIMED_OUT'
+
+
+class CheckRunState(Enum):
+    """
+    The possible states of a check run in a status rollup.
+    """
+
+    ACTION_REQUIRED = 'ACTION_REQUIRED'
+    CANCELLED = 'CANCELLED'
+    COMPLETED = 'COMPLETED'
+    FAILURE = 'FAILURE'
+    IN_PROGRESS = 'IN_PROGRESS'
+    NEUTRAL = 'NEUTRAL'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    SKIPPED = 'SKIPPED'
+    STALE = 'STALE'
+    STARTUP_FAILURE = 'STARTUP_FAILURE'
+    SUCCESS = 'SUCCESS'
+    TIMED_OUT = 'TIMED_OUT'
+    WAITING = 'WAITING'
+
+
+class CheckRunType(Enum):
+    """
+    The possible types of check runs.
+    """
+
+    ALL = 'ALL'
+    LATEST = 'LATEST'
+
+
+class CheckStatusState(Enum):
+    """
+    The possible states for a check suite or run status.
+    """
+
+    COMPLETED = 'COMPLETED'
+    IN_PROGRESS = 'IN_PROGRESS'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    REQUESTED = 'REQUESTED'
+    WAITING = 'WAITING'
+
+
+class CollaboratorAffiliation(Enum):
+    """
+    Collaborators affiliation level with a subject.
+    """
+
+    ALL = 'ALL'
+    DIRECT = 'DIRECT'
+    OUTSIDE = 'OUTSIDE'
+
+
+class CommentAuthorAssociation(Enum):
+    """
+    A comment author association with repository.
+    """
+
+    COLLABORATOR = 'COLLABORATOR'
+    CONTRIBUTOR = 'CONTRIBUTOR'
+    FIRST_TIMER = 'FIRST_TIMER'
+    FIRST_TIME_CONTRIBUTOR = 'FIRST_TIME_CONTRIBUTOR'
+    MANNEQUIN = 'MANNEQUIN'
+    MEMBER = 'MEMBER'
+    NONE = 'NONE'
+    OWNER = 'OWNER'
+
+
+class CommentCannotUpdateReason(Enum):
+    """
+    The possible errors that will prevent a user from updating a comment.
+    """
+
+    ARCHIVED = 'ARCHIVED'
+    DENIED = 'DENIED'
+    INSUFFICIENT_ACCESS = 'INSUFFICIENT_ACCESS'
+    LOCKED = 'LOCKED'
+    LOGIN_REQUIRED = 'LOGIN_REQUIRED'
+    MAINTENANCE = 'MAINTENANCE'
+    VERIFIED_EMAIL_REQUIRED = 'VERIFIED_EMAIL_REQUIRED'
+
+
+class CommitContributionOrderField(Enum):
+    """
+    Properties by which commit contribution connections can be ordered.
+    """
+
+    COMMIT_COUNT = 'COMMIT_COUNT'
+    OCCURRED_AT = 'OCCURRED_AT'
+
+
+class ComparisonStatus(Enum):
+    """
+    The status of a git comparison between two refs.
+    """
+
+    AHEAD = 'AHEAD'
+    BEHIND = 'BEHIND'
+    DIVERGED = 'DIVERGED'
+    IDENTICAL = 'IDENTICAL'
+
+
+class ContributionLevel(Enum):
+    """
+    Varying levels of contributions from none to many.
+    """
+
+    FIRST_QUARTILE = 'FIRST_QUARTILE'
+    FOURTH_QUARTILE = 'FOURTH_QUARTILE'
+    NONE = 'NONE'
+    SECOND_QUARTILE = 'SECOND_QUARTILE'
+    THIRD_QUARTILE = 'THIRD_QUARTILE'
+
+
+class DefaultRepositoryPermissionField(Enum):
+    """
+    The possible base permissions for repositories.
+    """
+
+    ADMIN = 'ADMIN'
+    NONE = 'NONE'
+    READ = 'READ'
+    WRITE = 'WRITE'
+
+
+class DependencyGraphEcosystem(Enum):
+    """
+    The possible ecosystems of a dependency graph package.
+    """
+
+    ACTIONS = 'ACTIONS'
+    COMPOSER = 'COMPOSER'
+    GO = 'GO'
+    MAVEN = 'MAVEN'
+    NPM = 'NPM'
+    NUGET = 'NUGET'
+    PIP = 'PIP'
+    PUB = 'PUB'
+    RUBYGEMS = 'RUBYGEMS'
+    RUST = 'RUST'
+    SWIFT = 'SWIFT'
+
+
+class DeploymentOrderField(Enum):
+    """
+    Properties by which deployment connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class DeploymentProtectionRuleType(Enum):
+    """
+    The possible protection rule types.
+    """
+
+    REQUIRED_REVIEWERS = 'REQUIRED_REVIEWERS'
+    WAIT_TIMER = 'WAIT_TIMER'
+
+
+class DeploymentReviewState(Enum):
+    """
+    The possible states for a deployment review.
+    """
+
+    APPROVED = 'APPROVED'
+    REJECTED = 'REJECTED'
+
+
+class DeploymentState(Enum):
+    """
+    The possible states in which a deployment can be.
+    """
+
+    ABANDONED = 'ABANDONED'
+    ACTIVE = 'ACTIVE'
+    DESTROYED = 'DESTROYED'
+    ERROR = 'ERROR'
+    FAILURE = 'FAILURE'
+    INACTIVE = 'INACTIVE'
+    IN_PROGRESS = 'IN_PROGRESS'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    SUCCESS = 'SUCCESS'
+    WAITING = 'WAITING'
+
+
+class DeploymentStatusState(Enum):
+    """
+    The possible states for a deployment status.
+    """
+
+    ERROR = 'ERROR'
+    FAILURE = 'FAILURE'
+    INACTIVE = 'INACTIVE'
+    IN_PROGRESS = 'IN_PROGRESS'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    SUCCESS = 'SUCCESS'
+    WAITING = 'WAITING'
+
+
+class DiffSide(Enum):
+    """
+    The possible sides of a diff.
+    """
+
+    LEFT = 'LEFT'
+    RIGHT = 'RIGHT'
+
+
+class DiscussionCloseReason(Enum):
+    """
+    The possible reasons for closing a discussion.
+    """
+
+    DUPLICATE = 'DUPLICATE'
+    OUTDATED = 'OUTDATED'
+    RESOLVED = 'RESOLVED'
+
+
+class DiscussionOrderField(Enum):
+    """
+    Properties by which discussion connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class DiscussionPollOptionOrderField(Enum):
+    """
+    Properties by which discussion poll option connections can be ordered.
+    """
+
+    AUTHORED_ORDER = 'AUTHORED_ORDER'
+    VOTE_COUNT = 'VOTE_COUNT'
+
+
+class DiscussionState(Enum):
+    """
+    The possible states of a discussion.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class DiscussionStateReason(Enum):
+    """
+    The possible state reasons of a discussion.
+    """
+
+    DUPLICATE = 'DUPLICATE'
+    OUTDATED = 'OUTDATED'
+    REOPENED = 'REOPENED'
+    RESOLVED = 'RESOLVED'
+
+
+class DismissReason(Enum):
+    """
+    The possible reasons that a Dependabot alert was dismissed.
+    """
+
+    FIX_STARTED = 'FIX_STARTED'
+    INACCURATE = 'INACCURATE'
+    NOT_USED = 'NOT_USED'
+    NO_BANDWIDTH = 'NO_BANDWIDTH'
+    TOLERABLE_RISK = 'TOLERABLE_RISK'
+
+
+class EnterpriseAdministratorInvitationOrderField(Enum):
+    """
+    Properties by which enterprise administrator invitation connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class EnterpriseAdministratorRole(Enum):
+    """
+    The possible administrator roles in an enterprise account.
+    """
+
+    BILLING_MANAGER = 'BILLING_MANAGER'
+    OWNER = 'OWNER'
+
+
+class EnterpriseAllowPrivateRepositoryForkingPolicyValue(Enum):
+    """
+    The possible values for the enterprise allow private repository forking policy value.
+    """
+
+    ENTERPRISE_ORGANIZATIONS = 'ENTERPRISE_ORGANIZATIONS'
+    ENTERPRISE_ORGANIZATIONS_USER_ACCOUNTS = 'ENTERPRISE_ORGANIZATIONS_USER_ACCOUNTS'
+    EVERYWHERE = 'EVERYWHERE'
+    SAME_ORGANIZATION = 'SAME_ORGANIZATION'
+    SAME_ORGANIZATION_USER_ACCOUNTS = 'SAME_ORGANIZATION_USER_ACCOUNTS'
+    USER_ACCOUNTS = 'USER_ACCOUNTS'
+
+
+class EnterpriseDefaultRepositoryPermissionSettingValue(Enum):
+    """
+    The possible values for the enterprise base repository permission setting.
+    """
+
+    ADMIN = 'ADMIN'
+    NONE = 'NONE'
+    NO_POLICY = 'NO_POLICY'
+    READ = 'READ'
+    WRITE = 'WRITE'
+
+
+class EnterpriseEnabledDisabledSettingValue(Enum):
+    """
+    The possible values for an enabled/disabled enterprise setting.
+    """
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+    NO_POLICY = 'NO_POLICY'
+
+
+class EnterpriseEnabledSettingValue(Enum):
+    """
+    The possible values for an enabled/no policy enterprise setting.
+    """
+
+    ENABLED = 'ENABLED'
+    NO_POLICY = 'NO_POLICY'
+
+
+class EnterpriseMemberOrderField(Enum):
+    """
+    Properties by which enterprise member connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    LOGIN = 'LOGIN'
+
+
+class EnterpriseMembersCanCreateRepositoriesSettingValue(Enum):
+    """
+    The possible values for the enterprise members can create repositories setting.
+    """
+
+    ALL = 'ALL'
+    DISABLED = 'DISABLED'
+    NO_POLICY = 'NO_POLICY'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class EnterpriseMembersCanMakePurchasesSettingValue(Enum):
+    """
+    The possible values for the members can make purchases setting.
+    """
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+
+
+class EnterpriseMembershipType(Enum):
+    """
+    The possible values we have for filtering Platform::Objects::User#enterprises.
+    """
+
+    ADMIN = 'ADMIN'
+    ALL = 'ALL'
+    BILLING_MANAGER = 'BILLING_MANAGER'
+    ORG_MEMBERSHIP = 'ORG_MEMBERSHIP'
+
+
+class EnterpriseOrderField(Enum):
+    """
+    Properties by which enterprise connections can be ordered.
+    """
+
+    NAME = 'NAME'
+
+
+class EnterpriseServerInstallationOrderField(Enum):
+    """
+    Properties by which Enterprise Server installation connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    CUSTOMER_NAME = 'CUSTOMER_NAME'
+    HOST_NAME = 'HOST_NAME'
+
+
+class EnterpriseServerUserAccountEmailOrderField(Enum):
+    """
+    Properties by which Enterprise Server user account email connections can be ordered.
+    """
+
+    EMAIL = 'EMAIL'
+
+
+class EnterpriseServerUserAccountOrderField(Enum):
+    """
+    Properties by which Enterprise Server user account connections can be ordered.
+    """
+
+    LOGIN = 'LOGIN'
+    REMOTE_CREATED_AT = 'REMOTE_CREATED_AT'
+
+
+class EnterpriseServerUserAccountsUploadOrderField(Enum):
+    """
+    Properties by which Enterprise Server user accounts upload connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class EnterpriseServerUserAccountsUploadSyncState(Enum):
+    """
+    Synchronization state of the Enterprise Server user accounts upload
+    """
+
+    FAILURE = 'FAILURE'
+    PENDING = 'PENDING'
+    SUCCESS = 'SUCCESS'
+
+
+class EnterpriseUserAccountMembershipRole(Enum):
+    """
+    The possible roles for enterprise membership.
+    """
+
+    MEMBER = 'MEMBER'
+    OWNER = 'OWNER'
+    UNAFFILIATED = 'UNAFFILIATED'
+
+
+class EnterpriseUserDeployment(Enum):
+    """
+    The possible GitHub Enterprise deployments where this user can exist.
+    """
+
+    CLOUD = 'CLOUD'
+    SERVER = 'SERVER'
+
+
+class EnvironmentOrderField(Enum):
+    """
+    Properties by which environments connections can be ordered
+    """
+
+    NAME = 'NAME'
+
+
+class FileViewedState(Enum):
+    """
+    The possible viewed states of a file .
+    """
+
+    DISMISSED = 'DISMISSED'
+    UNVIEWED = 'UNVIEWED'
+    VIEWED = 'VIEWED'
+
+
+class FundingPlatform(Enum):
+    """
+    The possible funding platforms for repository funding links.
+    """
+
+    COMMUNITY_BRIDGE = 'COMMUNITY_BRIDGE'
+    CUSTOM = 'CUSTOM'
+    GITHUB = 'GITHUB'
+    ISSUEHUNT = 'ISSUEHUNT'
+    KO_FI = 'KO_FI'
+    LFX_CROWDFUNDING = 'LFX_CROWDFUNDING'
+    LIBERAPAY = 'LIBERAPAY'
+    OPEN_COLLECTIVE = 'OPEN_COLLECTIVE'
+    OTECHIE = 'OTECHIE'
+    PATREON = 'PATREON'
+    TIDELIFT = 'TIDELIFT'
+
+
+class GistOrderField(Enum):
+    """
+    Properties by which gist connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    PUSHED_AT = 'PUSHED_AT'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class GistPrivacy(Enum):
+    """
+    The privacy of a Gist
+    """
+
+    ALL = 'ALL'
+    PUBLIC = 'PUBLIC'
+    SECRET = 'SECRET'
+
+
+class GitSignatureState(Enum):
+    """
+    The state of a Git signature.
+    """
+
+    BAD_CERT = 'BAD_CERT'
+    BAD_EMAIL = 'BAD_EMAIL'
+    EXPIRED_KEY = 'EXPIRED_KEY'
+    GPGVERIFY_ERROR = 'GPGVERIFY_ERROR'
+    GPGVERIFY_UNAVAILABLE = 'GPGVERIFY_UNAVAILABLE'
+    INVALID = 'INVALID'
+    MALFORMED_SIG = 'MALFORMED_SIG'
+    NOT_SIGNING_KEY = 'NOT_SIGNING_KEY'
+    NO_USER = 'NO_USER'
+    OCSP_ERROR = 'OCSP_ERROR'
+    OCSP_PENDING = 'OCSP_PENDING'
+    OCSP_REVOKED = 'OCSP_REVOKED'
+    UNKNOWN_KEY = 'UNKNOWN_KEY'
+    UNKNOWN_SIG_TYPE = 'UNKNOWN_SIG_TYPE'
+    UNSIGNED = 'UNSIGNED'
+    UNVERIFIED_EMAIL = 'UNVERIFIED_EMAIL'
+    VALID = 'VALID'
+
+
+class IdentityProviderConfigurationState(Enum):
+    """
+    The possible states in which authentication can be configured with an identity provider.
+    """
+
+    CONFIGURED = 'CONFIGURED'
+    ENFORCED = 'ENFORCED'
+    UNCONFIGURED = 'UNCONFIGURED'
+
+
+class IpAllowListEnabledSettingValue(Enum):
+    """
+    The possible values for the IP allow list enabled setting.
+    """
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+
+
+class IpAllowListEntryOrderField(Enum):
+    """
+    Properties by which IP allow list entry connections can be ordered.
+    """
+
+    ALLOW_LIST_VALUE = 'ALLOW_LIST_VALUE'
+    CREATED_AT = 'CREATED_AT'
+
+
+class IpAllowListForInstalledAppsEnabledSettingValue(Enum):
+    """
+    The possible values for the IP allow list configuration for installed GitHub Apps setting.
+    """
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+
+
+class IssueClosedStateReason(Enum):
+    """
+    The possible state reasons of a closed issue.
+    """
+
+    COMPLETED = 'COMPLETED'
+    NOT_PLANNED = 'NOT_PLANNED'
+
+
+class IssueCommentOrderField(Enum):
+    """
+    Properties by which issue comment connections can be ordered.
+    """
+
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class IssueOrderField(Enum):
+    """
+    Properties by which issue connections can be ordered.
+    """
+
+    COMMENTS = 'COMMENTS'
+    CREATED_AT = 'CREATED_AT'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class IssueState(Enum):
+    """
+    The possible states of an issue.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class IssueStateReason(Enum):
+    """
+    The possible state reasons of an issue.
+    """
+
+    COMPLETED = 'COMPLETED'
+    NOT_PLANNED = 'NOT_PLANNED'
+    REOPENED = 'REOPENED'
+
+
+class IssueTimelineItemsItemType(Enum):
+    """
+    The possible item types found in a timeline.
+    """
+
+    ADDED_TO_PROJECT_EVENT = 'ADDED_TO_PROJECT_EVENT'
+    ASSIGNED_EVENT = 'ASSIGNED_EVENT'
+    CLOSED_EVENT = 'CLOSED_EVENT'
+    COMMENT_DELETED_EVENT = 'COMMENT_DELETED_EVENT'
+    CONNECTED_EVENT = 'CONNECTED_EVENT'
+    CONVERTED_NOTE_TO_ISSUE_EVENT = 'CONVERTED_NOTE_TO_ISSUE_EVENT'
+    CONVERTED_TO_DISCUSSION_EVENT = 'CONVERTED_TO_DISCUSSION_EVENT'
+    CROSS_REFERENCED_EVENT = 'CROSS_REFERENCED_EVENT'
+    DEMILESTONED_EVENT = 'DEMILESTONED_EVENT'
+    DISCONNECTED_EVENT = 'DISCONNECTED_EVENT'
+    ISSUE_COMMENT = 'ISSUE_COMMENT'
+    LABELED_EVENT = 'LABELED_EVENT'
+    LOCKED_EVENT = 'LOCKED_EVENT'
+    MARKED_AS_DUPLICATE_EVENT = 'MARKED_AS_DUPLICATE_EVENT'
+    MENTIONED_EVENT = 'MENTIONED_EVENT'
+    MILESTONED_EVENT = 'MILESTONED_EVENT'
+    MOVED_COLUMNS_IN_PROJECT_EVENT = 'MOVED_COLUMNS_IN_PROJECT_EVENT'
+    PINNED_EVENT = 'PINNED_EVENT'
+    REFERENCED_EVENT = 'REFERENCED_EVENT'
+    REMOVED_FROM_PROJECT_EVENT = 'REMOVED_FROM_PROJECT_EVENT'
+    RENAMED_TITLE_EVENT = 'RENAMED_TITLE_EVENT'
+    REOPENED_EVENT = 'REOPENED_EVENT'
+    SUBSCRIBED_EVENT = 'SUBSCRIBED_EVENT'
+    TRANSFERRED_EVENT = 'TRANSFERRED_EVENT'
+    UNASSIGNED_EVENT = 'UNASSIGNED_EVENT'
+    UNLABELED_EVENT = 'UNLABELED_EVENT'
+    UNLOCKED_EVENT = 'UNLOCKED_EVENT'
+    UNMARKED_AS_DUPLICATE_EVENT = 'UNMARKED_AS_DUPLICATE_EVENT'
+    UNPINNED_EVENT = 'UNPINNED_EVENT'
+    UNSUBSCRIBED_EVENT = 'UNSUBSCRIBED_EVENT'
+    USER_BLOCKED_EVENT = 'USER_BLOCKED_EVENT'
+
+
+class LabelOrderField(Enum):
+    """
+    Properties by which label connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+
+
+class LanguageOrderField(Enum):
+    """
+    Properties by which language connections can be ordered.
+    """
+
+    SIZE = 'SIZE'
+
+
+class LockReason(Enum):
+    """
+    The possible reasons that an issue or pull request was locked.
+    """
+
+    OFF_TOPIC = 'OFF_TOPIC'
+    RESOLVED = 'RESOLVED'
+    SPAM = 'SPAM'
+    TOO_HEATED = 'TOO_HEATED'
+
+
+class MannequinOrderField(Enum):
+    """
+    Properties by which mannequins can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    LOGIN = 'LOGIN'
+
+
+class MergeCommitMessage(Enum):
+    """
+    The possible default commit messages for merges.
+    """
+
+    BLANK = 'BLANK'
+    PR_BODY = 'PR_BODY'
+    PR_TITLE = 'PR_TITLE'
+
+
+class MergeCommitTitle(Enum):
+    """
+    The possible default commit titles for merges.
+    """
+
+    MERGE_MESSAGE = 'MERGE_MESSAGE'
+    PR_TITLE = 'PR_TITLE'
+
+
+class MergeQueueEntryState(Enum):
+    """
+    The possible states for a merge queue entry.
+    """
+
+    AWAITING_CHECKS = 'AWAITING_CHECKS'
+    LOCKED = 'LOCKED'
+    MERGEABLE = 'MERGEABLE'
+    QUEUED = 'QUEUED'
+    UNMERGEABLE = 'UNMERGEABLE'
+
+
+class MergeQueueMergingStrategy(Enum):
+    """
+    The possible merging strategies for a merge queue.
+    """
+
+    ALLGREEN = 'ALLGREEN'
+    HEADGREEN = 'HEADGREEN'
+
+
+class MergeStateStatus(Enum):
+    """
+    Detailed status information about a pull request merge.
+    """
+
+    BEHIND = 'BEHIND'
+    BLOCKED = 'BLOCKED'
+    CLEAN = 'CLEAN'
+    DIRTY = 'DIRTY'
+    DRAFT = 'DRAFT'
+    HAS_HOOKS = 'HAS_HOOKS'
+    UNKNOWN = 'UNKNOWN'
+    UNSTABLE = 'UNSTABLE'
+
+
+class MergeableState(Enum):
+    """
+    Whether or not a PullRequest can be merged.
+    """
+
+    CONFLICTING = 'CONFLICTING'
+    MERGEABLE = 'MERGEABLE'
+    UNKNOWN = 'UNKNOWN'
+
+
+class MigrationSourceType(Enum):
+    """
+    Represents the different GitHub Enterprise Importer (GEI) migration sources.
+    """
+
+    AZURE_DEVOPS = 'AZURE_DEVOPS'
+    BITBUCKET_SERVER = 'BITBUCKET_SERVER'
+    GITHUB_ARCHIVE = 'GITHUB_ARCHIVE'
+
+
+class MigrationState(Enum):
+    """
+    The GitHub Enterprise Importer (GEI) migration state.
+    """
+
+    FAILED = 'FAILED'
+    FAILED_VALIDATION = 'FAILED_VALIDATION'
+    IN_PROGRESS = 'IN_PROGRESS'
+    NOT_STARTED = 'NOT_STARTED'
+    PENDING_VALIDATION = 'PENDING_VALIDATION'
+    QUEUED = 'QUEUED'
+    SUCCEEDED = 'SUCCEEDED'
+
+
+class MilestoneOrderField(Enum):
+    """
+    Properties by which milestone connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    DUE_DATE = 'DUE_DATE'
+    NUMBER = 'NUMBER'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class MilestoneState(Enum):
+    """
+    The possible states of a milestone.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class NotificationRestrictionSettingValue(Enum):
+    """
+    The possible values for the notification restriction setting.
+    """
+
+    DISABLED = 'DISABLED'
+    ENABLED = 'ENABLED'
+
+
+class OIDCProviderType(Enum):
+    """
+    The OIDC identity provider type
+    """
+
+    AAD = 'AAD'
+
+
+class OauthApplicationCreateAuditEntryState(Enum):
+    """
+    The state of an OAuth application when it was created.
+    """
+
+    ACTIVE = 'ACTIVE'
+    PENDING_DELETION = 'PENDING_DELETION'
+    SUSPENDED = 'SUSPENDED'
+
+
+class OperationType(Enum):
+    """
+    The corresponding operation type for the action
+    """
+
+    ACCESS = 'ACCESS'
+    AUTHENTICATION = 'AUTHENTICATION'
+    CREATE = 'CREATE'
+    MODIFY = 'MODIFY'
+    REMOVE = 'REMOVE'
+    RESTORE = 'RESTORE'
+    TRANSFER = 'TRANSFER'
+
+
+class OrderDirection(Enum):
+    """
+    Possible directions in which to order a list of items when provided an `orderBy` argument.
+    """
+
+    ASC = 'ASC'
+    DESC = 'DESC'
+
+
+class OrgAddMemberAuditEntryPermission(Enum):
+    """
+    The permissions available to members on an Organization.
+    """
+
+    ADMIN = 'ADMIN'
+    READ = 'READ'
+
+
+class OrgCreateAuditEntryBillingPlan(Enum):
+    """
+    The billing plans available for organizations.
+    """
+
+    BUSINESS = 'BUSINESS'
+    BUSINESS_PLUS = 'BUSINESS_PLUS'
+    FREE = 'FREE'
+    TIERED_PER_SEAT = 'TIERED_PER_SEAT'
+    UNLIMITED = 'UNLIMITED'
+
+
+class OrgEnterpriseOwnerOrderField(Enum):
+    """
+    Properties by which enterprise owners can be ordered.
+    """
+
+    LOGIN = 'LOGIN'
+
+
+class OrgRemoveBillingManagerAuditEntryReason(Enum):
+    """
+    The reason a billing manager was removed from an Organization.
+    """
+
+    SAML_EXTERNAL_IDENTITY_MISSING = 'SAML_EXTERNAL_IDENTITY_MISSING'
+    SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY = (
+        'SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY'
+    )
+    TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE = 'TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE'
+
+
+class OrgRemoveMemberAuditEntryMembershipType(Enum):
+    """
+    The type of membership a user has with an Organization.
+    """
+
+    ADMIN = 'ADMIN'
+    BILLING_MANAGER = 'BILLING_MANAGER'
+    DIRECT_MEMBER = 'DIRECT_MEMBER'
+    OUTSIDE_COLLABORATOR = 'OUTSIDE_COLLABORATOR'
+    SUSPENDED = 'SUSPENDED'
+    UNAFFILIATED = 'UNAFFILIATED'
+
+
+class OrgRemoveMemberAuditEntryReason(Enum):
+    """
+    The reason a member was removed from an Organization.
+    """
+
+    SAML_EXTERNAL_IDENTITY_MISSING = 'SAML_EXTERNAL_IDENTITY_MISSING'
+    SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY = (
+        'SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY'
+    )
+    TWO_FACTOR_ACCOUNT_RECOVERY = 'TWO_FACTOR_ACCOUNT_RECOVERY'
+    TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE = 'TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE'
+    USER_ACCOUNT_DELETED = 'USER_ACCOUNT_DELETED'
+
+
+class OrgRemoveOutsideCollaboratorAuditEntryMembershipType(Enum):
+    """
+    The type of membership a user has with an Organization.
+    """
+
+    BILLING_MANAGER = 'BILLING_MANAGER'
+    OUTSIDE_COLLABORATOR = 'OUTSIDE_COLLABORATOR'
+    UNAFFILIATED = 'UNAFFILIATED'
+
+
+class OrgRemoveOutsideCollaboratorAuditEntryReason(Enum):
+    """
+    The reason an outside collaborator was removed from an Organization.
+    """
+
+    SAML_EXTERNAL_IDENTITY_MISSING = 'SAML_EXTERNAL_IDENTITY_MISSING'
+    TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE = 'TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE'
+
+
+class OrgUpdateDefaultRepositoryPermissionAuditEntryPermission(Enum):
+    """
+    The default permission a repository can have in an Organization.
+    """
+
+    ADMIN = 'ADMIN'
+    NONE = 'NONE'
+    READ = 'READ'
+    WRITE = 'WRITE'
+
+
+class OrgUpdateMemberAuditEntryPermission(Enum):
+    """
+    The permissions available to members on an Organization.
+    """
+
+    ADMIN = 'ADMIN'
+    READ = 'READ'
+
+
+class OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility(Enum):
+    """
+    The permissions available for repository creation on an Organization.
+    """
+
+    ALL = 'ALL'
+    INTERNAL = 'INTERNAL'
+    NONE = 'NONE'
+    PRIVATE = 'PRIVATE'
+    PRIVATE_INTERNAL = 'PRIVATE_INTERNAL'
+    PUBLIC = 'PUBLIC'
+    PUBLIC_INTERNAL = 'PUBLIC_INTERNAL'
+    PUBLIC_PRIVATE = 'PUBLIC_PRIVATE'
+
+
+class OrganizationInvitationRole(Enum):
+    """
+    The possible organization invitation roles.
+    """
+
+    ADMIN = 'ADMIN'
+    BILLING_MANAGER = 'BILLING_MANAGER'
+    DIRECT_MEMBER = 'DIRECT_MEMBER'
+    REINSTATE = 'REINSTATE'
+
+
+class OrganizationInvitationSource(Enum):
+    """
+    The possible organization invitation sources.
+    """
+
+    MEMBER = 'MEMBER'
+    SCIM = 'SCIM'
+    UNKNOWN = 'UNKNOWN'
+
+
+class OrganizationInvitationType(Enum):
+    """
+    The possible organization invitation types.
+    """
+
+    EMAIL = 'EMAIL'
+    USER = 'USER'
+
+
+class OrganizationMemberRole(Enum):
+    """
+    The possible roles within an organization for its members.
+    """
+
+    ADMIN = 'ADMIN'
+    MEMBER = 'MEMBER'
+
+
+class OrganizationMembersCanCreateRepositoriesSettingValue(Enum):
+    """
+    The possible values for the members can create repositories setting on an organization.
+    """
+
+    ALL = 'ALL'
+    DISABLED = 'DISABLED'
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+
+
+class OrganizationMigrationState(Enum):
+    """
+    The Octoshift Organization migration state.
+    """
+
+    FAILED = 'FAILED'
+    FAILED_VALIDATION = 'FAILED_VALIDATION'
+    IN_PROGRESS = 'IN_PROGRESS'
+    NOT_STARTED = 'NOT_STARTED'
+    PENDING_VALIDATION = 'PENDING_VALIDATION'
+    POST_REPO_MIGRATION = 'POST_REPO_MIGRATION'
+    PRE_REPO_MIGRATION = 'PRE_REPO_MIGRATION'
+    QUEUED = 'QUEUED'
+    REPO_MIGRATION = 'REPO_MIGRATION'
+    SUCCEEDED = 'SUCCEEDED'
+
+
+class OrganizationOrderField(Enum):
+    """
+    Properties by which organization connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    LOGIN = 'LOGIN'
+
+
+class PackageFileOrderField(Enum):
+    """
+    Properties by which package file connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class PackageOrderField(Enum):
+    """
+    Properties by which package connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class PackageType(Enum):
+    """
+    The possible types of a package.
+    """
+
+    DEBIAN = 'DEBIAN'
+    DOCKER = 'DOCKER'
+    MAVEN = 'MAVEN'
+    NPM = 'NPM'
+    NUGET = 'NUGET'
+    PYPI = 'PYPI'
+    RUBYGEMS = 'RUBYGEMS'
+
+
+class PackageVersionOrderField(Enum):
+    """
+    Properties by which package version connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class PatchStatus(Enum):
+    """
+    The possible types of patch statuses.
+    """
+
+    ADDED = 'ADDED'
+    CHANGED = 'CHANGED'
+    COPIED = 'COPIED'
+    DELETED = 'DELETED'
+    MODIFIED = 'MODIFIED'
+    RENAMED = 'RENAMED'
+
+
+class PinnableItemType(Enum):
+    """
+    Represents items that can be pinned to a profile page or dashboard.
+    """
+
+    GIST = 'GIST'
+    ISSUE = 'ISSUE'
+    ORGANIZATION = 'ORGANIZATION'
+    PROJECT = 'PROJECT'
+    PULL_REQUEST = 'PULL_REQUEST'
+    REPOSITORY = 'REPOSITORY'
+    TEAM = 'TEAM'
+    USER = 'USER'
+
+
+class PinnedDiscussionGradient(Enum):
+    """
+    Preconfigured gradients that may be used to style discussions pinned within a repository.
+    """
+
+    BLUE_MINT = 'BLUE_MINT'
+    BLUE_PURPLE = 'BLUE_PURPLE'
+    PINK_BLUE = 'PINK_BLUE'
+    PURPLE_CORAL = 'PURPLE_CORAL'
+    RED_ORANGE = 'RED_ORANGE'
+
+
+class PinnedDiscussionPattern(Enum):
+    """
+    Preconfigured background patterns that may be used to style discussions pinned within a repository.
+    """
+
+    CHEVRON_UP = 'CHEVRON_UP'
+    DOT = 'DOT'
+    DOT_FILL = 'DOT_FILL'
+    HEART_FILL = 'HEART_FILL'
+    PLUS = 'PLUS'
+    ZAP = 'ZAP'
+
+
+class ProjectCardArchivedState(Enum):
+    """
+    The possible archived states of a project card.
+    """
+
+    ARCHIVED = 'ARCHIVED'
+    NOT_ARCHIVED = 'NOT_ARCHIVED'
+
+
+class ProjectCardState(Enum):
+    """
+    Various content states of a ProjectCard
+    """
+
+    CONTENT_ONLY = 'CONTENT_ONLY'
+    NOTE_ONLY = 'NOTE_ONLY'
+    REDACTED = 'REDACTED'
+
+
+class ProjectColumnPurpose(Enum):
+    """
+    The semantic purpose of the column - todo, in progress, or done.
+    """
+
+    DONE = 'DONE'
+    IN_PROGRESS = 'IN_PROGRESS'
+    TODO = 'TODO'
+
+
+class ProjectOrderField(Enum):
+    """
+    Properties by which project connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class ProjectState(Enum):
+    """
+    State of the project; either 'open' or 'closed'
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class ProjectTemplate(Enum):
+    """
+    GitHub-provided templates for Projects
+    """
+
+    AUTOMATED_KANBAN_V2 = 'AUTOMATED_KANBAN_V2'
+    AUTOMATED_REVIEWS_KANBAN = 'AUTOMATED_REVIEWS_KANBAN'
+    BASIC_KANBAN = 'BASIC_KANBAN'
+    BUG_TRIAGE = 'BUG_TRIAGE'
+
+
+class ProjectV2CustomFieldType(Enum):
+    """
+    The type of a project field.
+    """
+
+    DATE = 'DATE'
+    NUMBER = 'NUMBER'
+    SINGLE_SELECT = 'SINGLE_SELECT'
+    TEXT = 'TEXT'
+
+
+class ProjectV2FieldOrderField(Enum):
+    """
+    Properties by which project v2 field connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    POSITION = 'POSITION'
+
+
+class ProjectV2FieldType(Enum):
+    """
+    The type of a project field.
+    """
+
+    ASSIGNEES = 'ASSIGNEES'
+    DATE = 'DATE'
+    ITERATION = 'ITERATION'
+    LABELS = 'LABELS'
+    LINKED_PULL_REQUESTS = 'LINKED_PULL_REQUESTS'
+    MILESTONE = 'MILESTONE'
+    NUMBER = 'NUMBER'
+    REPOSITORY = 'REPOSITORY'
+    REVIEWERS = 'REVIEWERS'
+    SINGLE_SELECT = 'SINGLE_SELECT'
+    TEXT = 'TEXT'
+    TITLE = 'TITLE'
+    TRACKED_BY = 'TRACKED_BY'
+    TRACKS = 'TRACKS'
+
+
+class ProjectV2ItemFieldValueOrderField(Enum):
+    """
+    Properties by which project v2 item field value connections can be ordered.
+    """
+
+    POSITION = 'POSITION'
+
+
+class ProjectV2ItemOrderField(Enum):
+    """
+    Properties by which project v2 item connections can be ordered.
+    """
+
+    POSITION = 'POSITION'
+
+
+class ProjectV2ItemType(Enum):
+    """
+    The type of a project item.
+    """
+
+    DRAFT_ISSUE = 'DRAFT_ISSUE'
+    ISSUE = 'ISSUE'
+    PULL_REQUEST = 'PULL_REQUEST'
+    REDACTED = 'REDACTED'
+
+
+class ProjectV2OrderField(Enum):
+    """
+    Properties by which projects can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NUMBER = 'NUMBER'
+    TITLE = 'TITLE'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class ProjectV2Roles(Enum):
+    """
+    The possible roles of a collaborator on a project.
+    """
+
+    ADMIN = 'ADMIN'
+    NONE = 'NONE'
+    READER = 'READER'
+    WRITER = 'WRITER'
+
+
+class ProjectV2SingleSelectFieldOptionColor(Enum):
+    """
+    The display color of a single-select field option.
+    """
+
+    BLUE = 'BLUE'
+    GRAY = 'GRAY'
+    GREEN = 'GREEN'
+    ORANGE = 'ORANGE'
+    PINK = 'PINK'
+    PURPLE = 'PURPLE'
+    RED = 'RED'
+    YELLOW = 'YELLOW'
+
+
+class ProjectV2State(Enum):
+    """
+    The possible states of a project v2.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class ProjectV2ViewLayout(Enum):
+    """
+    The layout of a project v2 view.
+    """
+
+    BOARD_LAYOUT = 'BOARD_LAYOUT'
+    ROADMAP_LAYOUT = 'ROADMAP_LAYOUT'
+    TABLE_LAYOUT = 'TABLE_LAYOUT'
+
+
+class ProjectV2ViewOrderField(Enum):
+    """
+    Properties by which project v2 view connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    POSITION = 'POSITION'
+
+
+class ProjectV2WorkflowsOrderField(Enum):
+    """
+    Properties by which project workflows can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    NUMBER = 'NUMBER'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class PullRequestBranchUpdateMethod(Enum):
+    """
+    The possible methods for updating a pull request's head branch with the base branch.
+    """
+
+    MERGE = 'MERGE'
+    REBASE = 'REBASE'
+
+
+class PullRequestMergeMethod(Enum):
+    """
+    Represents available types of methods to use when merging a pull request.
+    """
+
+    MERGE = 'MERGE'
+    REBASE = 'REBASE'
+    SQUASH = 'SQUASH'
+
+
+class PullRequestOrderField(Enum):
+    """
+    Properties by which pull_requests connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class PullRequestReviewCommentState(Enum):
+    """
+    The possible states of a pull request review comment.
+    """
+
+    PENDING = 'PENDING'
+    SUBMITTED = 'SUBMITTED'
+
+
+class PullRequestReviewDecision(Enum):
+    """
+    The review status of a pull request.
+    """
+
+    APPROVED = 'APPROVED'
+    CHANGES_REQUESTED = 'CHANGES_REQUESTED'
+    REVIEW_REQUIRED = 'REVIEW_REQUIRED'
+
+
+class PullRequestReviewEvent(Enum):
+    """
+    The possible events to perform on a pull request review.
+    """
+
+    APPROVE = 'APPROVE'
+    COMMENT = 'COMMENT'
+    DISMISS = 'DISMISS'
+    REQUEST_CHANGES = 'REQUEST_CHANGES'
+
+
+class PullRequestReviewState(Enum):
+    """
+    The possible states of a pull request review.
+    """
+
+    APPROVED = 'APPROVED'
+    CHANGES_REQUESTED = 'CHANGES_REQUESTED'
+    COMMENTED = 'COMMENTED'
+    DISMISSED = 'DISMISSED'
+    PENDING = 'PENDING'
+
+
+class PullRequestReviewThreadSubjectType(Enum):
+    """
+    The possible subject types of a pull request review comment.
+    """
+
+    FILE = 'FILE'
+    LINE = 'LINE'
+
+
+class PullRequestState(Enum):
+    """
+    The possible states of a pull request.
+    """
+
+    CLOSED = 'CLOSED'
+    MERGED = 'MERGED'
+    OPEN = 'OPEN'
+
+
+class PullRequestTimelineItemsItemType(Enum):
+    """
+    The possible item types found in a timeline.
+    """
+
+    ADDED_TO_MERGE_QUEUE_EVENT = 'ADDED_TO_MERGE_QUEUE_EVENT'
+    ADDED_TO_PROJECT_EVENT = 'ADDED_TO_PROJECT_EVENT'
+    ASSIGNED_EVENT = 'ASSIGNED_EVENT'
+    AUTOMATIC_BASE_CHANGE_FAILED_EVENT = 'AUTOMATIC_BASE_CHANGE_FAILED_EVENT'
+    AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT = 'AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT'
+    AUTO_MERGE_DISABLED_EVENT = 'AUTO_MERGE_DISABLED_EVENT'
+    AUTO_MERGE_ENABLED_EVENT = 'AUTO_MERGE_ENABLED_EVENT'
+    AUTO_REBASE_ENABLED_EVENT = 'AUTO_REBASE_ENABLED_EVENT'
+    AUTO_SQUASH_ENABLED_EVENT = 'AUTO_SQUASH_ENABLED_EVENT'
+    BASE_REF_CHANGED_EVENT = 'BASE_REF_CHANGED_EVENT'
+    BASE_REF_DELETED_EVENT = 'BASE_REF_DELETED_EVENT'
+    BASE_REF_FORCE_PUSHED_EVENT = 'BASE_REF_FORCE_PUSHED_EVENT'
+    CLOSED_EVENT = 'CLOSED_EVENT'
+    COMMENT_DELETED_EVENT = 'COMMENT_DELETED_EVENT'
+    CONNECTED_EVENT = 'CONNECTED_EVENT'
+    CONVERTED_NOTE_TO_ISSUE_EVENT = 'CONVERTED_NOTE_TO_ISSUE_EVENT'
+    CONVERTED_TO_DISCUSSION_EVENT = 'CONVERTED_TO_DISCUSSION_EVENT'
+    CONVERT_TO_DRAFT_EVENT = 'CONVERT_TO_DRAFT_EVENT'
+    CROSS_REFERENCED_EVENT = 'CROSS_REFERENCED_EVENT'
+    DEMILESTONED_EVENT = 'DEMILESTONED_EVENT'
+    DEPLOYED_EVENT = 'DEPLOYED_EVENT'
+    DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT = 'DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT'
+    DISCONNECTED_EVENT = 'DISCONNECTED_EVENT'
+    HEAD_REF_DELETED_EVENT = 'HEAD_REF_DELETED_EVENT'
+    HEAD_REF_FORCE_PUSHED_EVENT = 'HEAD_REF_FORCE_PUSHED_EVENT'
+    HEAD_REF_RESTORED_EVENT = 'HEAD_REF_RESTORED_EVENT'
+    ISSUE_COMMENT = 'ISSUE_COMMENT'
+    LABELED_EVENT = 'LABELED_EVENT'
+    LOCKED_EVENT = 'LOCKED_EVENT'
+    MARKED_AS_DUPLICATE_EVENT = 'MARKED_AS_DUPLICATE_EVENT'
+    MENTIONED_EVENT = 'MENTIONED_EVENT'
+    MERGED_EVENT = 'MERGED_EVENT'
+    MILESTONED_EVENT = 'MILESTONED_EVENT'
+    MOVED_COLUMNS_IN_PROJECT_EVENT = 'MOVED_COLUMNS_IN_PROJECT_EVENT'
+    PINNED_EVENT = 'PINNED_EVENT'
+    PULL_REQUEST_COMMIT = 'PULL_REQUEST_COMMIT'
+    PULL_REQUEST_COMMIT_COMMENT_THREAD = 'PULL_REQUEST_COMMIT_COMMENT_THREAD'
+    PULL_REQUEST_REVIEW = 'PULL_REQUEST_REVIEW'
+    PULL_REQUEST_REVIEW_THREAD = 'PULL_REQUEST_REVIEW_THREAD'
+    PULL_REQUEST_REVISION_MARKER = 'PULL_REQUEST_REVISION_MARKER'
+    READY_FOR_REVIEW_EVENT = 'READY_FOR_REVIEW_EVENT'
+    REFERENCED_EVENT = 'REFERENCED_EVENT'
+    REMOVED_FROM_MERGE_QUEUE_EVENT = 'REMOVED_FROM_MERGE_QUEUE_EVENT'
+    REMOVED_FROM_PROJECT_EVENT = 'REMOVED_FROM_PROJECT_EVENT'
+    RENAMED_TITLE_EVENT = 'RENAMED_TITLE_EVENT'
+    REOPENED_EVENT = 'REOPENED_EVENT'
+    REVIEW_DISMISSED_EVENT = 'REVIEW_DISMISSED_EVENT'
+    REVIEW_REQUESTED_EVENT = 'REVIEW_REQUESTED_EVENT'
+    REVIEW_REQUEST_REMOVED_EVENT = 'REVIEW_REQUEST_REMOVED_EVENT'
+    SUBSCRIBED_EVENT = 'SUBSCRIBED_EVENT'
+    TRANSFERRED_EVENT = 'TRANSFERRED_EVENT'
+    UNASSIGNED_EVENT = 'UNASSIGNED_EVENT'
+    UNLABELED_EVENT = 'UNLABELED_EVENT'
+    UNLOCKED_EVENT = 'UNLOCKED_EVENT'
+    UNMARKED_AS_DUPLICATE_EVENT = 'UNMARKED_AS_DUPLICATE_EVENT'
+    UNPINNED_EVENT = 'UNPINNED_EVENT'
+    UNSUBSCRIBED_EVENT = 'UNSUBSCRIBED_EVENT'
+    USER_BLOCKED_EVENT = 'USER_BLOCKED_EVENT'
+
+
+class PullRequestUpdateState(Enum):
+    """
+    The possible target states when updating a pull request.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class ReactionContent(Enum):
+    """
+    Emojis that can be attached to Issues, Pull Requests and Comments.
+    """
+
+    CONFUSED = 'CONFUSED'
+    EYES = 'EYES'
+    HEART = 'HEART'
+    HOORAY = 'HOORAY'
+    LAUGH = 'LAUGH'
+    ROCKET = 'ROCKET'
+    THUMBS_DOWN = 'THUMBS_DOWN'
+    THUMBS_UP = 'THUMBS_UP'
+
+
+class ReactionOrderField(Enum):
+    """
+    A list of fields that reactions can be ordered by.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class RefOrderField(Enum):
+    """
+    Properties by which ref connections can be ordered.
+    """
+
+    ALPHABETICAL = 'ALPHABETICAL'
+    TAG_COMMIT_DATE = 'TAG_COMMIT_DATE'
+
+
+class ReleaseOrderField(Enum):
+    """
+    Properties by which release connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+
+
+class RepoAccessAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepoAddMemberAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepoArchivedAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepoChangeMergeSettingAuditEntryMergeType(Enum):
+    """
+    The merge options available for pull requests to this repository.
+    """
+
+    MERGE = 'MERGE'
+    REBASE = 'REBASE'
+    SQUASH = 'SQUASH'
+
+
+class RepoCreateAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepoDestroyAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepoRemoveMemberAuditEntryVisibility(Enum):
+    """
+    The privacy of a repository
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class ReportedContentClassifiers(Enum):
+    """
+    The reasons a piece of content can be reported or minimized.
+    """
+
+    ABUSE = 'ABUSE'
+    DUPLICATE = 'DUPLICATE'
+    OFF_TOPIC = 'OFF_TOPIC'
+    OUTDATED = 'OUTDATED'
+    RESOLVED = 'RESOLVED'
+    SPAM = 'SPAM'
+
+
+class RepositoryAffiliation(Enum):
+    """
+    The affiliation of a user to a repository
+    """
+
+    COLLABORATOR = 'COLLABORATOR'
+    ORGANIZATION_MEMBER = 'ORGANIZATION_MEMBER'
+    OWNER = 'OWNER'
+
+
+class RepositoryContributionType(Enum):
+    """
+    The reason a repository is listed as 'contributed'.
+    """
+
+    COMMIT = 'COMMIT'
+    ISSUE = 'ISSUE'
+    PULL_REQUEST = 'PULL_REQUEST'
+    PULL_REQUEST_REVIEW = 'PULL_REQUEST_REVIEW'
+    REPOSITORY = 'REPOSITORY'
+
+
+class RepositoryInteractionLimit(Enum):
+    """
+    A repository interaction limit.
+    """
+
+    COLLABORATORS_ONLY = 'COLLABORATORS_ONLY'
+    CONTRIBUTORS_ONLY = 'CONTRIBUTORS_ONLY'
+    EXISTING_USERS = 'EXISTING_USERS'
+    NO_LIMIT = 'NO_LIMIT'
+
+
+class RepositoryInteractionLimitExpiry(Enum):
+    """
+    The length for a repository interaction limit to be enabled for.
+    """
+
+    ONE_DAY = 'ONE_DAY'
+    ONE_MONTH = 'ONE_MONTH'
+    ONE_WEEK = 'ONE_WEEK'
+    SIX_MONTHS = 'SIX_MONTHS'
+    THREE_DAYS = 'THREE_DAYS'
+
+
+class RepositoryInteractionLimitOrigin(Enum):
+    """
+    Indicates where an interaction limit is configured.
+    """
+
+    ORGANIZATION = 'ORGANIZATION'
+    REPOSITORY = 'REPOSITORY'
+    USER = 'USER'
+
+
+class RepositoryInvitationOrderField(Enum):
+    """
+    Properties by which repository invitation connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class RepositoryLockReason(Enum):
+    """
+    The possible reasons a given repository could be in a locked state.
+    """
+
+    BILLING = 'BILLING'
+    MIGRATING = 'MIGRATING'
+    MOVING = 'MOVING'
+    RENAME = 'RENAME'
+    TRADE_RESTRICTION = 'TRADE_RESTRICTION'
+    TRANSFERRING_OWNERSHIP = 'TRANSFERRING_OWNERSHIP'
+
+
+class RepositoryMigrationOrderDirection(Enum):
+    """
+    Possible directions in which to order a list of repository migrations when provided an `orderBy` argument.
+    """
+
+    ASC = 'ASC'
+    DESC = 'DESC'
+
+
+class RepositoryMigrationOrderField(Enum):
+    """
+    Properties by which repository migrations can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class RepositoryOrderField(Enum):
+    """
+    Properties by which repository connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    PUSHED_AT = 'PUSHED_AT'
+    STARGAZERS = 'STARGAZERS'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class RepositoryPermission(Enum):
+    """
+    The access level to a repository
+    """
+
+    ADMIN = 'ADMIN'
+    MAINTAIN = 'MAINTAIN'
+    READ = 'READ'
+    TRIAGE = 'TRIAGE'
+    WRITE = 'WRITE'
+
+
+class RepositoryPrivacy(Enum):
+    """
+    The privacy of a repository
+    """
+
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepositoryRuleType(Enum):
+    """
+    The rule types supported in rulesets
+    """
+
+    AUTHORIZATION = 'AUTHORIZATION'
+    BRANCH_NAME_PATTERN = 'BRANCH_NAME_PATTERN'
+    COMMITTER_EMAIL_PATTERN = 'COMMITTER_EMAIL_PATTERN'
+    COMMIT_AUTHOR_EMAIL_PATTERN = 'COMMIT_AUTHOR_EMAIL_PATTERN'
+    COMMIT_MESSAGE_PATTERN = 'COMMIT_MESSAGE_PATTERN'
+    CREATION = 'CREATION'
+    DELETION = 'DELETION'
+    LOCK_BRANCH = 'LOCK_BRANCH'
+    MAX_REF_UPDATES = 'MAX_REF_UPDATES'
+    MERGE_QUEUE = 'MERGE_QUEUE'
+    MERGE_QUEUE_LOCKED_REF = 'MERGE_QUEUE_LOCKED_REF'
+    NON_FAST_FORWARD = 'NON_FAST_FORWARD'
+    PULL_REQUEST = 'PULL_REQUEST'
+    REQUIRED_DEPLOYMENTS = 'REQUIRED_DEPLOYMENTS'
+    REQUIRED_LINEAR_HISTORY = 'REQUIRED_LINEAR_HISTORY'
+    REQUIRED_REVIEW_THREAD_RESOLUTION = 'REQUIRED_REVIEW_THREAD_RESOLUTION'
+    REQUIRED_SIGNATURES = 'REQUIRED_SIGNATURES'
+    REQUIRED_STATUS_CHECKS = 'REQUIRED_STATUS_CHECKS'
+    REQUIRED_WORKFLOW_STATUS_CHECKS = 'REQUIRED_WORKFLOW_STATUS_CHECKS'
+    RULESET_REQUIRED_SIGNATURES = 'RULESET_REQUIRED_SIGNATURES'
+    SECRET_SCANNING = 'SECRET_SCANNING'
+    TAG = 'TAG'
+    TAG_NAME_PATTERN = 'TAG_NAME_PATTERN'
+    UPDATE = 'UPDATE'
+    WORKFLOWS = 'WORKFLOWS'
+    WORKFLOW_UPDATES = 'WORKFLOW_UPDATES'
+
+
+class RepositoryRulesetBypassActorBypassMode(Enum):
+    """
+    The bypass mode for a specific actor on a ruleset.
+    """
+
+    ALWAYS = 'ALWAYS'
+    PULL_REQUEST = 'PULL_REQUEST'
+
+
+class RepositoryRulesetTarget(Enum):
+    """
+    The targets supported for rulesets
+    """
+
+    BRANCH = 'BRANCH'
+    TAG = 'TAG'
+
+
+class RepositoryVisibility(Enum):
+    """
+    The repository's visibility level.
+    """
+
+    INTERNAL = 'INTERNAL'
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class RepositoryVulnerabilityAlertDependencyScope(Enum):
+    """
+    The possible scopes of an alert's dependency.
+    """
+
+    DEVELOPMENT = 'DEVELOPMENT'
+    RUNTIME = 'RUNTIME'
+
+
+class RepositoryVulnerabilityAlertState(Enum):
+    """
+    The possible states of an alert
+    """
+
+    AUTO_DISMISSED = 'AUTO_DISMISSED'
+    DISMISSED = 'DISMISSED'
+    FIXED = 'FIXED'
+    OPEN = 'OPEN'
+
+
+class RequestableCheckStatusState(Enum):
+    """
+    The possible states that can be requested when creating a check run.
+    """
+
+    COMPLETED = 'COMPLETED'
+    IN_PROGRESS = 'IN_PROGRESS'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    WAITING = 'WAITING'
+
+
+class RoleInOrganization(Enum):
+    """
+    Possible roles a user may have in relation to an organization.
+    """
+
+    DIRECT_MEMBER = 'DIRECT_MEMBER'
+    OWNER = 'OWNER'
+    UNAFFILIATED = 'UNAFFILIATED'
+
+
+class RuleEnforcement(Enum):
+    """
+    The level of enforcement for a rule or ruleset.
+    """
+
+    ACTIVE = 'ACTIVE'
+    DISABLED = 'DISABLED'
+    EVALUATE = 'EVALUATE'
+
+
+class SamlDigestAlgorithm(Enum):
+    """
+    The possible digest algorithms used to sign SAML requests for an identity provider.
+    """
+
+    SHA1 = 'SHA1'
+    SHA256 = 'SHA256'
+    SHA384 = 'SHA384'
+    SHA512 = 'SHA512'
+
+
+class SamlSignatureAlgorithm(Enum):
+    """
+    The possible signature algorithms used to sign SAML requests for a Identity Provider.
+    """
+
+    RSA_SHA1 = 'RSA_SHA1'
+    RSA_SHA256 = 'RSA_SHA256'
+    RSA_SHA384 = 'RSA_SHA384'
+    RSA_SHA512 = 'RSA_SHA512'
+
+
+class SavedReplyOrderField(Enum):
+    """
+    Properties by which saved reply connections can be ordered.
+    """
+
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class SearchType(Enum):
+    """
+    Represents the individual results of a search.
+    """
+
+    DISCUSSION = 'DISCUSSION'
+    ISSUE = 'ISSUE'
+    REPOSITORY = 'REPOSITORY'
+    USER = 'USER'
+
+
+class SecurityAdvisoryClassification(Enum):
+    """
+    Classification of the advisory.
+    """
+
+    GENERAL = 'GENERAL'
+    MALWARE = 'MALWARE'
+
+
+class SecurityAdvisoryEcosystem(Enum):
+    """
+    The possible ecosystems of a security vulnerability's package.
+    """
+
+    ACTIONS = 'ACTIONS'
+    COMPOSER = 'COMPOSER'
+    ERLANG = 'ERLANG'
+    GO = 'GO'
+    MAVEN = 'MAVEN'
+    NPM = 'NPM'
+    NUGET = 'NUGET'
+    PIP = 'PIP'
+    PUB = 'PUB'
+    RUBYGEMS = 'RUBYGEMS'
+    RUST = 'RUST'
+    SWIFT = 'SWIFT'
+
+
+class SecurityAdvisoryIdentifierType(Enum):
+    """
+    Identifier formats available for advisories.
+    """
+
+    CVE = 'CVE'
+    GHSA = 'GHSA'
+
+
+class SecurityAdvisoryOrderField(Enum):
+    """
+    Properties by which security advisory connections can be ordered.
+    """
+
+    PUBLISHED_AT = 'PUBLISHED_AT'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class SecurityAdvisorySeverity(Enum):
+    """
+    Severity of the vulnerability.
+    """
+
+    CRITICAL = 'CRITICAL'
+    HIGH = 'HIGH'
+    LOW = 'LOW'
+    MODERATE = 'MODERATE'
+
+
+class SecurityVulnerabilityOrderField(Enum):
+    """
+    Properties by which security vulnerability connections can be ordered.
+    """
+
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class SocialAccountProvider(Enum):
+    """
+    Software or company that hosts social media accounts.
+    """
+
+    FACEBOOK = 'FACEBOOK'
+    GENERIC = 'GENERIC'
+    HOMETOWN = 'HOMETOWN'
+    INSTAGRAM = 'INSTAGRAM'
+    LINKEDIN = 'LINKEDIN'
+    MASTODON = 'MASTODON'
+    NPM = 'NPM'
+    REDDIT = 'REDDIT'
+    TWITCH = 'TWITCH'
+    TWITTER = 'TWITTER'
+    YOUTUBE = 'YOUTUBE'
+
+
+class SponsorOrderField(Enum):
+    """
+    Properties by which sponsor connections can be ordered.
+    """
+
+    LOGIN = 'LOGIN'
+    RELEVANCE = 'RELEVANCE'
+
+
+class SponsorableOrderField(Enum):
+    """
+    Properties by which sponsorable connections can be ordered.
+    """
+
+    LOGIN = 'LOGIN'
+
+
+class SponsorsActivityAction(Enum):
+    """
+    The possible actions that GitHub Sponsors activities can represent.
+    """
+
+    CANCELLED_SPONSORSHIP = 'CANCELLED_SPONSORSHIP'
+    NEW_SPONSORSHIP = 'NEW_SPONSORSHIP'
+    PENDING_CHANGE = 'PENDING_CHANGE'
+    REFUND = 'REFUND'
+    SPONSOR_MATCH_DISABLED = 'SPONSOR_MATCH_DISABLED'
+    TIER_CHANGE = 'TIER_CHANGE'
+
+
+class SponsorsActivityOrderField(Enum):
+    """
+    Properties by which GitHub Sponsors activity connections can be ordered.
+    """
+
+    TIMESTAMP = 'TIMESTAMP'
+
+
+class SponsorsActivityPeriod(Enum):
+    """
+    The possible time periods for which Sponsors activities can be requested.
+    """
+
+    ALL = 'ALL'
+    DAY = 'DAY'
+    MONTH = 'MONTH'
+    WEEK = 'WEEK'
+
+
+class SponsorsCountryOrRegionCode(Enum):
+    """
+    Represents countries or regions for billing and residence for a GitHub Sponsors profile.
+    """
+
+    AD = 'AD'
+    AE = 'AE'
+    AF = 'AF'
+    AG = 'AG'
+    AI = 'AI'
+    AL = 'AL'
+    AM = 'AM'
+    AO = 'AO'
+    AQ = 'AQ'
+    AR = 'AR'
+    AS = 'AS'
+    AT = 'AT'
+    AU = 'AU'
+    AW = 'AW'
+    AX = 'AX'
+    AZ = 'AZ'
+    BA = 'BA'
+    BB = 'BB'
+    BD = 'BD'
+    BE = 'BE'
+    BF = 'BF'
+    BG = 'BG'
+    BH = 'BH'
+    BI = 'BI'
+    BJ = 'BJ'
+    BL = 'BL'
+    BM = 'BM'
+    BN = 'BN'
+    BO = 'BO'
+    BQ = 'BQ'
+    BR = 'BR'
+    BS = 'BS'
+    BT = 'BT'
+    BV = 'BV'
+    BW = 'BW'
+    BY = 'BY'
+    BZ = 'BZ'
+    CA = 'CA'
+    CC = 'CC'
+    CD = 'CD'
+    CF = 'CF'
+    CG = 'CG'
+    CH = 'CH'
+    CI = 'CI'
+    CK = 'CK'
+    CL = 'CL'
+    CM = 'CM'
+    CN = 'CN'
+    CO = 'CO'
+    CR = 'CR'
+    CV = 'CV'
+    CW = 'CW'
+    CX = 'CX'
+    CY = 'CY'
+    CZ = 'CZ'
+    DE = 'DE'
+    DJ = 'DJ'
+    DK = 'DK'
+    DM = 'DM'
+    DO = 'DO'
+    DZ = 'DZ'
+    EC = 'EC'
+    EE = 'EE'
+    EG = 'EG'
+    EH = 'EH'
+    ER = 'ER'
+    ES = 'ES'
+    ET = 'ET'
+    FI = 'FI'
+    FJ = 'FJ'
+    FK = 'FK'
+    FM = 'FM'
+    FO = 'FO'
+    FR = 'FR'
+    GA = 'GA'
+    GB = 'GB'
+    GD = 'GD'
+    GE = 'GE'
+    GF = 'GF'
+    GG = 'GG'
+    GH = 'GH'
+    GI = 'GI'
+    GL = 'GL'
+    GM = 'GM'
+    GN = 'GN'
+    GP = 'GP'
+    GQ = 'GQ'
+    GR = 'GR'
+    GS = 'GS'
+    GT = 'GT'
+    GU = 'GU'
+    GW = 'GW'
+    GY = 'GY'
+    HK = 'HK'
+    HM = 'HM'
+    HN = 'HN'
+    HR = 'HR'
+    HT = 'HT'
+    HU = 'HU'
+    ID = 'ID'
+    IE = 'IE'
+    IL = 'IL'
+    IM = 'IM'
+    IN = 'IN'
+    IO = 'IO'
+    IQ = 'IQ'
+    IR = 'IR'
+    IS = 'IS'
+    IT = 'IT'
+    JE = 'JE'
+    JM = 'JM'
+    JO = 'JO'
+    JP = 'JP'
+    KE = 'KE'
+    KG = 'KG'
+    KH = 'KH'
+    KI = 'KI'
+    KM = 'KM'
+    KN = 'KN'
+    KR = 'KR'
+    KW = 'KW'
+    KY = 'KY'
+    KZ = 'KZ'
+    LA = 'LA'
+    LB = 'LB'
+    LC = 'LC'
+    LI = 'LI'
+    LK = 'LK'
+    LR = 'LR'
+    LS = 'LS'
+    LT = 'LT'
+    LU = 'LU'
+    LV = 'LV'
+    LY = 'LY'
+    MA = 'MA'
+    MC = 'MC'
+    MD = 'MD'
+    ME = 'ME'
+    MF = 'MF'
+    MG = 'MG'
+    MH = 'MH'
+    MK = 'MK'
+    ML = 'ML'
+    MM = 'MM'
+    MN = 'MN'
+    MO = 'MO'
+    MP = 'MP'
+    MQ = 'MQ'
+    MR = 'MR'
+    MS = 'MS'
+    MT = 'MT'
+    MU = 'MU'
+    MV = 'MV'
+    MW = 'MW'
+    MX = 'MX'
+    MY = 'MY'
+    MZ = 'MZ'
+    NA = 'NA'
+    NC = 'NC'
+    NE = 'NE'
+    NF = 'NF'
+    NG = 'NG'
+    NI = 'NI'
+    NL = 'NL'
+    NO = 'NO'
+    NP = 'NP'
+    NR = 'NR'
+    NU = 'NU'
+    NZ = 'NZ'
+    OM = 'OM'
+    PA = 'PA'
+    PE = 'PE'
+    PF = 'PF'
+    PG = 'PG'
+    PH = 'PH'
+    PK = 'PK'
+    PL = 'PL'
+    PM = 'PM'
+    PN = 'PN'
+    PR = 'PR'
+    PS = 'PS'
+    PT = 'PT'
+    PW = 'PW'
+    PY = 'PY'
+    QA = 'QA'
+    RE = 'RE'
+    RO = 'RO'
+    RS = 'RS'
+    RU = 'RU'
+    RW = 'RW'
+    SA = 'SA'
+    SB = 'SB'
+    SC = 'SC'
+    SD = 'SD'
+    SE = 'SE'
+    SG = 'SG'
+    SH = 'SH'
+    SI = 'SI'
+    SJ = 'SJ'
+    SK = 'SK'
+    SL = 'SL'
+    SM = 'SM'
+    SN = 'SN'
+    SO = 'SO'
+    SR = 'SR'
+    SS = 'SS'
+    ST = 'ST'
+    SV = 'SV'
+    SX = 'SX'
+    SZ = 'SZ'
+    TC = 'TC'
+    TD = 'TD'
+    TF = 'TF'
+    TG = 'TG'
+    TH = 'TH'
+    TJ = 'TJ'
+    TK = 'TK'
+    TL = 'TL'
+    TM = 'TM'
+    TN = 'TN'
+    TO = 'TO'
+    TR = 'TR'
+    TT = 'TT'
+    TV = 'TV'
+    TW = 'TW'
+    TZ = 'TZ'
+    UA = 'UA'
+    UG = 'UG'
+    UM = 'UM'
+    US = 'US'
+    UY = 'UY'
+    UZ = 'UZ'
+    VA = 'VA'
+    VC = 'VC'
+    VE = 'VE'
+    VG = 'VG'
+    VI = 'VI'
+    VN = 'VN'
+    VU = 'VU'
+    WF = 'WF'
+    WS = 'WS'
+    YE = 'YE'
+    YT = 'YT'
+    ZA = 'ZA'
+    ZM = 'ZM'
+    ZW = 'ZW'
+
+
+class SponsorsGoalKind(Enum):
+    """
+    The different kinds of goals a GitHub Sponsors member can have.
+    """
+
+    MONTHLY_SPONSORSHIP_AMOUNT = 'MONTHLY_SPONSORSHIP_AMOUNT'
+    TOTAL_SPONSORS_COUNT = 'TOTAL_SPONSORS_COUNT'
+
+
+class SponsorsListingFeaturedItemFeatureableType(Enum):
+    """
+    The different kinds of records that can be featured on a GitHub Sponsors profile page.
+    """
+
+    REPOSITORY = 'REPOSITORY'
+    USER = 'USER'
+
+
+class SponsorsTierOrderField(Enum):
+    """
+    Properties by which Sponsors tiers connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    MONTHLY_PRICE_IN_CENTS = 'MONTHLY_PRICE_IN_CENTS'
+
+
+class SponsorshipNewsletterOrderField(Enum):
+    """
+    Properties by which sponsorship update connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class SponsorshipOrderField(Enum):
+    """
+    Properties by which sponsorship connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class SponsorshipPaymentSource(Enum):
+    """
+    How payment was made for funding a GitHub Sponsors sponsorship.
+    """
+
+    GITHUB = 'GITHUB'
+    PATREON = 'PATREON'
+
+
+class SponsorshipPrivacy(Enum):
+    """
+    The privacy of a sponsorship
+    """
+
+    PRIVATE = 'PRIVATE'
+    PUBLIC = 'PUBLIC'
+
+
+class SquashMergeCommitMessage(Enum):
+    """
+    The possible default commit messages for squash merges.
+    """
+
+    BLANK = 'BLANK'
+    COMMIT_MESSAGES = 'COMMIT_MESSAGES'
+    PR_BODY = 'PR_BODY'
+
+
+class SquashMergeCommitTitle(Enum):
+    """
+    The possible default commit titles for squash merges.
+    """
+
+    COMMIT_OR_PR_TITLE = 'COMMIT_OR_PR_TITLE'
+    PR_TITLE = 'PR_TITLE'
+
+
+class StarOrderField(Enum):
+    """
+    Properties by which star connections can be ordered.
+    """
+
+    STARRED_AT = 'STARRED_AT'
+
+
+class StatusState(Enum):
+    """
+    The possible commit status states.
+    """
+
+    ERROR = 'ERROR'
+    EXPECTED = 'EXPECTED'
+    FAILURE = 'FAILURE'
+    PENDING = 'PENDING'
+    SUCCESS = 'SUCCESS'
+
+
+class SubscriptionState(Enum):
+    """
+    The possible states of a subscription.
+    """
+
+    IGNORED = 'IGNORED'
+    SUBSCRIBED = 'SUBSCRIBED'
+    UNSUBSCRIBED = 'UNSUBSCRIBED'
+
+
+class TeamDiscussionCommentOrderField(Enum):
+    """
+    Properties by which team discussion comment connections can be ordered.
+    """
+
+    NUMBER = 'NUMBER'
+
+
+class TeamDiscussionOrderField(Enum):
+    """
+    Properties by which team discussion connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class TeamMemberOrderField(Enum):
+    """
+    Properties by which team member connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    LOGIN = 'LOGIN'
+
+
+class TeamMemberRole(Enum):
+    """
+    The possible team member roles; either 'maintainer' or 'member'.
+    """
+
+    MAINTAINER = 'MAINTAINER'
+    MEMBER = 'MEMBER'
+
+
+class TeamMembershipType(Enum):
+    """
+    Defines which types of team members are included in the returned list. Can be one of IMMEDIATE, CHILD_TEAM or ALL.
+    """
+
+    ALL = 'ALL'
+    CHILD_TEAM = 'CHILD_TEAM'
+    IMMEDIATE = 'IMMEDIATE'
+
+
+class TeamNotificationSetting(Enum):
+    """
+    The possible team notification values.
+    """
+
+    NOTIFICATIONS_DISABLED = 'NOTIFICATIONS_DISABLED'
+    NOTIFICATIONS_ENABLED = 'NOTIFICATIONS_ENABLED'
+
+
+class TeamOrderField(Enum):
+    """
+    Properties by which team connections can be ordered.
+    """
+
+    NAME = 'NAME'
+
+
+class TeamPrivacy(Enum):
+    """
+    The possible team privacy values.
+    """
+
+    SECRET = 'SECRET'
+    VISIBLE = 'VISIBLE'
+
+
+class TeamRepositoryOrderField(Enum):
+    """
+    Properties by which team repository connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    NAME = 'NAME'
+    PERMISSION = 'PERMISSION'
+    PUSHED_AT = 'PUSHED_AT'
+    STARGAZERS = 'STARGAZERS'
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class TeamReviewAssignmentAlgorithm(Enum):
+    """
+    The possible team review assignment algorithms
+    """
+
+    LOAD_BALANCE = 'LOAD_BALANCE'
+    ROUND_ROBIN = 'ROUND_ROBIN'
+
+
+class TeamRole(Enum):
+    """
+    The role of a user on a team.
+    """
+
+    ADMIN = 'ADMIN'
+    MEMBER = 'MEMBER'
+
+
+class ThreadSubscriptionFormAction(Enum):
+    """
+    The possible states of a thread subscription form action
+    """
+
+    NONE = 'NONE'
+    SUBSCRIBE = 'SUBSCRIBE'
+    UNSUBSCRIBE = 'UNSUBSCRIBE'
+
+
+class ThreadSubscriptionState(Enum):
+    """
+    The possible states of a subscription.
+    """
+
+    DISABLED = 'DISABLED'
+    IGNORING_LIST = 'IGNORING_LIST'
+    IGNORING_THREAD = 'IGNORING_THREAD'
+    NONE = 'NONE'
+    SUBSCRIBED_TO_LIST = 'SUBSCRIBED_TO_LIST'
+    SUBSCRIBED_TO_THREAD = 'SUBSCRIBED_TO_THREAD'
+    SUBSCRIBED_TO_THREAD_EVENTS = 'SUBSCRIBED_TO_THREAD_EVENTS'
+    SUBSCRIBED_TO_THREAD_TYPE = 'SUBSCRIBED_TO_THREAD_TYPE'
+    UNAVAILABLE = 'UNAVAILABLE'
+
+
+class TopicSuggestionDeclineReason(Enum):
+    """
+    Reason that the suggested topic is declined.
+    """
+
+    NOT_RELEVANT = 'NOT_RELEVANT'
+    PERSONAL_PREFERENCE = 'PERSONAL_PREFERENCE'
+    TOO_GENERAL = 'TOO_GENERAL'
+    TOO_SPECIFIC = 'TOO_SPECIFIC'
+
+
+class TrackedIssueStates(Enum):
+    """
+    The possible states of a tracked issue.
+    """
+
+    CLOSED = 'CLOSED'
+    OPEN = 'OPEN'
+
+
+class UserBlockDuration(Enum):
+    """
+    The possible durations that a user can be blocked for.
+    """
+
+    ONE_DAY = 'ONE_DAY'
+    ONE_MONTH = 'ONE_MONTH'
+    ONE_WEEK = 'ONE_WEEK'
+    PERMANENT = 'PERMANENT'
+    THREE_DAYS = 'THREE_DAYS'
+
+
+class UserStatusOrderField(Enum):
+    """
+    Properties by which user status connections can be ordered.
+    """
+
+    UPDATED_AT = 'UPDATED_AT'
+
+
+class VerifiableDomainOrderField(Enum):
+    """
+    Properties by which verifiable domain connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+    DOMAIN = 'DOMAIN'
+
+
+class WorkflowRunOrderField(Enum):
+    """
+    Properties by which workflow run connections can be ordered.
+    """
+
+    CREATED_AT = 'CREATED_AT'
+
+
+class WorkflowState(Enum):
+    """
+    The possible states for a workflow.
+    """
+
+    ACTIVE = 'ACTIVE'
+    DELETED = 'DELETED'
+    DISABLED_FORK = 'DISABLED_FORK'
+    DISABLED_INACTIVITY = 'DISABLED_INACTIVITY'
+    DISABLED_MANUALLY = 'DISABLED_MANUALLY'
+
+
+class Actor(BaseModel):
+    """
+    Represents an object which can take actions on GitHub. Typically a User or Bot.
+    """
+
+    avatarUrl: URI
+    login: String
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['Actor']] = Field('Actor', alias='__typename')
+
+
+class AnnouncementBanner(BaseModel):
+    """
+    Represents an announcement banner.
+    """
+
+    announcement: Optional[String] = None
+    announcementExpiresAt: Optional[DateTime] = None
+    announcementUserDismissible: Optional[Boolean] = None
+    typename__: Optional[Literal['AnnouncementBanner']] = Field(
+        'AnnouncementBanner', alias='__typename'
+    )
+
+
+class Assignable(BaseModel):
+    """
+    An object that can have users assigned to it.
+    """
+
+    assignees: UserConnection
+    typename__: Optional[Literal['Assignable']] = Field(
+        'Assignable', alias='__typename'
+    )
+
+
+class AuditEntry(BaseModel):
+    """
+    An entry in the audit log.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    operationType: Optional[OperationType] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['AuditEntry']] = Field(
+        'AuditEntry', alias='__typename'
+    )
+
+
+class Closable(BaseModel):
+    """
+    An object that can be closed
+    """
+
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    viewerCanClose: Boolean
+    viewerCanReopen: Boolean
+    typename__: Optional[Literal['Closable']] = Field('Closable', alias='__typename')
+
+
+class Comment(BaseModel):
+    """
+    Represents a comment.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    publishedAt: Optional[DateTime] = None
+    updatedAt: DateTime
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['Comment']] = Field('Comment', alias='__typename')
+
+
+class Contribution(BaseModel):
+    """
+    Represents a contribution a user made on GitHub, such as opening an issue.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['Contribution']] = Field(
+        'Contribution', alias='__typename'
+    )
+
+
+class Deletable(BaseModel):
+    """
+    Entities that can be deleted.
+    """
+
+    viewerCanDelete: Boolean
+    typename__: Optional[Literal['Deletable']] = Field('Deletable', alias='__typename')
+
+
+class EnterpriseAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry containing enterprise account information.
+    """
+
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    typename__: Optional[Literal['EnterpriseAuditEntryData']] = Field(
+        'EnterpriseAuditEntryData', alias='__typename'
+    )
+
+
+class GitObject(BaseModel):
+    """
+    Represents a Git object.
+    """
+
+    abbreviatedOid: String
+    commitResourcePath: URI
+    commitUrl: URI
+    id: ID
+    oid: GitObjectID
+    repository: Repository
+    typename__: Optional[Literal['GitObject']] = Field('GitObject', alias='__typename')
+
+
+class GitSignature(BaseModel):
+    """
+    Information about a signature (GPG or S/MIME) on a Commit or Tag.
+    """
+
+    email: String
+    isValid: Boolean
+    payload: String
+    signature: String
+    signer: Optional[User] = None
+    state: GitSignatureState
+    wasSignedByGitHub: Boolean
+    typename__: Optional[Literal['GitSignature']] = Field(
+        'GitSignature', alias='__typename'
+    )
+
+
+class HovercardContext(BaseModel):
+    """
+    An individual line of a hovercard
+    """
+
+    message: String
+    octicon: String
+    typename__: Optional[Literal['HovercardContext']] = Field(
+        'HovercardContext', alias='__typename'
+    )
+
+
+class Labelable(BaseModel):
+    """
+    An object that can have labels assigned to it.
+    """
+
+    labels: Optional[LabelConnection] = None
+    typename__: Optional[Literal['Labelable']] = Field('Labelable', alias='__typename')
+
+
+class Lockable(BaseModel):
+    """
+    An object that can be locked.
+    """
+
+    activeLockReason: Optional[LockReason] = None
+    locked: Boolean
+    typename__: Optional[Literal['Lockable']] = Field('Lockable', alias='__typename')
+
+
+class MemberStatusable(BaseModel):
+    """
+    Entities that have members who can set status messages.
+    """
+
+    memberStatuses: UserStatusConnection
+    typename__: Optional[Literal['MemberStatusable']] = Field(
+        'MemberStatusable', alias='__typename'
+    )
+
+
+class Migration(BaseModel):
+    """
+    Represents a GitHub Enterprise Importer (GEI) migration.
+    """
+
+    continueOnError: Boolean
+    createdAt: DateTime
+    databaseId: Optional[String] = None
+    failureReason: Optional[String] = None
+    id: ID
+    migrationLogUrl: Optional[URI] = None
+    migrationSource: MigrationSource
+    repositoryName: String
+    sourceUrl: URI
+    state: MigrationState
+    warningsCount: Int
+    typename__: Optional[Literal['Migration']] = Field('Migration', alias='__typename')
+
+
+class Minimizable(BaseModel):
+    """
+    Entities that can be minimized.
+    """
+
+    isMinimized: Boolean
+    minimizedReason: Optional[String] = None
+    viewerCanMinimize: Boolean
+    typename__: Optional[Literal['Minimizable']] = Field(
+        'Minimizable', alias='__typename'
+    )
+
+
+class Node(BaseModel):
+    """
+    An object with an ID.
+    """
+
+    id: ID
+    typename__: Optional[Literal['Node']] = Field('Node', alias='__typename')
+
+
+class OauthApplicationAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry with action oauth_application.*
+    """
+
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    typename__: Optional[Literal['OauthApplicationAuditEntryData']] = Field(
+        'OauthApplicationAuditEntryData', alias='__typename'
+    )
+
+
+class OrganizationAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry with action org.*
+    """
+
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrganizationAuditEntryData']] = Field(
+        'OrganizationAuditEntryData', alias='__typename'
+    )
+
+
+class PackageOwner(BaseModel):
+    """
+    Represents an owner of a package.
+    """
+
+    id: ID
+    packages: PackageConnection
+    typename__: Optional[Literal['PackageOwner']] = Field(
+        'PackageOwner', alias='__typename'
+    )
+
+
+class ProfileOwner(BaseModel):
+    """
+    Represents any entity on GitHub that has a profile page.
+    """
+
+    anyPinnableItems: Boolean
+    email: Optional[String] = None
+    id: ID
+    itemShowcase: ProfileItemShowcase
+    location: Optional[String] = None
+    login: String
+    name: Optional[String] = None
+    pinnableItems: PinnableItemConnection
+    pinnedItems: PinnableItemConnection
+    pinnedItemsRemaining: Int
+    viewerCanChangePinnedItems: Boolean
+    websiteUrl: Optional[URI] = None
+    typename__: Optional[Literal['ProfileOwner']] = Field(
+        'ProfileOwner', alias='__typename'
+    )
+
+
+class ProjectOwner(BaseModel):
+    """
+    Represents an owner of a Project.
+    """
+
+    id: ID
+    project: Optional[Project] = None
+    projects: ProjectConnection
+    projectsResourcePath: URI
+    projectsUrl: URI
+    viewerCanCreateProjects: Boolean
+    typename__: Optional[Literal['ProjectOwner']] = Field(
+        'ProjectOwner', alias='__typename'
+    )
+
+
+class ProjectV2FieldCommon(BaseModel):
+    """
+    Common fields across different project field types
+    """
+
+    createdAt: DateTime
+    dataType: ProjectV2FieldType
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    project: ProjectV2
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2FieldCommon']] = Field(
+        'ProjectV2FieldCommon', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldValueCommon(BaseModel):
+    """
+    Common fields across different project field value types
+    """
+
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldValueCommon']] = Field(
+        'ProjectV2ItemFieldValueCommon', alias='__typename'
+    )
+
+
+class ProjectV2Owner(BaseModel):
+    """
+    Represents an owner of a project (beta).
+    """
+
+    id: ID
+    projectV2: Optional[ProjectV2] = None
+    projectsV2: ProjectV2Connection
+    typename__: Optional[Literal['ProjectV2Owner']] = Field(
+        'ProjectV2Owner', alias='__typename'
+    )
+
+
+class ProjectV2Recent(BaseModel):
+    """
+    Recent projects for the owner.
+    """
+
+    recentProjects: ProjectV2Connection
+    typename__: Optional[Literal['ProjectV2Recent']] = Field(
+        'ProjectV2Recent', alias='__typename'
+    )
+
+
+class Reactable(BaseModel):
+    """
+    Represents a subject that can be reacted on.
+    """
+
+    databaseId: Optional[Int] = None
+    id: ID
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    viewerCanReact: Boolean
+    typename__: Optional[Literal['Reactable']] = Field('Reactable', alias='__typename')
+
+
+class RepositoryAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry with action repo.*
+    """
+
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepositoryAuditEntryData']] = Field(
+        'RepositoryAuditEntryData', alias='__typename'
+    )
+
+
+class RepositoryDiscussionAuthor(BaseModel):
+    """
+    Represents an author of discussions in repositories.
+    """
+
+    repositoryDiscussions: DiscussionConnection
+    typename__: Optional[Literal['RepositoryDiscussionAuthor']] = Field(
+        'RepositoryDiscussionAuthor', alias='__typename'
+    )
+
+
+class RepositoryDiscussionCommentAuthor(BaseModel):
+    """
+    Represents an author of discussion comments in repositories.
+    """
+
+    repositoryDiscussionComments: DiscussionCommentConnection
+    typename__: Optional[Literal['RepositoryDiscussionCommentAuthor']] = Field(
+        'RepositoryDiscussionCommentAuthor', alias='__typename'
+    )
+
+
+class RepositoryInfo(BaseModel):
+    """
+    A subset of repository info.
+    """
+
+    archivedAt: Optional[DateTime] = None
+    createdAt: DateTime
+    description: Optional[String] = None
+    descriptionHTML: HTML
+    forkCount: Int
+    hasDiscussionsEnabled: Boolean
+    hasIssuesEnabled: Boolean
+    hasProjectsEnabled: Boolean
+    hasWikiEnabled: Boolean
+    homepageUrl: Optional[URI] = None
+    isArchived: Boolean
+    isFork: Boolean
+    isInOrganization: Boolean
+    isLocked: Boolean
+    isMirror: Boolean
+    isPrivate: Boolean
+    isTemplate: Boolean
+    licenseInfo: Optional[License] = None
+    lockReason: Optional[RepositoryLockReason] = None
+    mirrorUrl: Optional[URI] = None
+    name: String
+    nameWithOwner: String
+    openGraphImageUrl: URI
+    owner: RepositoryOwner
+    pushedAt: Optional[DateTime] = None
+    resourcePath: URI
+    shortDescriptionHTML: HTML
+    updatedAt: DateTime
+    url: URI
+    usesCustomOpenGraphImage: Boolean
+    visibility: RepositoryVisibility
+    typename__: Optional[Literal['RepositoryInfo']] = Field(
+        'RepositoryInfo', alias='__typename'
+    )
+
+
+class RepositoryNode(BaseModel):
+    """
+    Represents a object that belongs to a repository.
+    """
+
+    repository: Repository
+    typename__: Optional[Literal['RepositoryNode']] = Field(
+        'RepositoryNode', alias='__typename'
+    )
+
+
+class RepositoryOwner(BaseModel):
+    """
+    Represents an owner of a Repository.
+    """
+
+    avatarUrl: URI
+    id: ID
+    login: String
+    repositories: RepositoryConnection
+    repository: Optional[Repository] = None
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['RepositoryOwner']] = Field(
+        'RepositoryOwner', alias='__typename'
+    )
+
+
+class RequirableByPullRequest(BaseModel):
+    """
+    Represents a type that can be required by a pull request for merging.
+    """
+
+    isRequired: Boolean
+    typename__: Optional[Literal['RequirableByPullRequest']] = Field(
+        'RequirableByPullRequest', alias='__typename'
+    )
+
+
+class Sponsorable(BaseModel):
+    """
+    Entities that can sponsor or be sponsored through GitHub Sponsors.
+    """
+
+    estimatedNextSponsorsPayoutInCents: Int
+    hasSponsorsListing: Boolean
+    isSponsoredBy: Boolean
+    isSponsoringViewer: Boolean
+    monthlyEstimatedSponsorsIncomeInCents: Int
+    sponsoring: SponsorConnection
+    sponsors: SponsorConnection
+    sponsorsActivities: SponsorsActivityConnection
+    sponsorsListing: Optional[SponsorsListing] = None
+    sponsorshipForViewerAsSponsor: Optional[Sponsorship] = None
+    sponsorshipForViewerAsSponsorable: Optional[Sponsorship] = None
+    sponsorshipNewsletters: SponsorshipNewsletterConnection
+    sponsorshipsAsMaintainer: SponsorshipConnection
+    sponsorshipsAsSponsor: SponsorshipConnection
+    totalSponsorshipAmountAsSponsorInCents: Optional[Int] = None
+    viewerCanSponsor: Boolean
+    viewerIsSponsoring: Boolean
+    typename__: Optional[Literal['Sponsorable']] = Field(
+        'Sponsorable', alias='__typename'
+    )
+
+
+class Starrable(BaseModel):
+    """
+    Things that can be starred.
+    """
+
+    id: ID
+    stargazerCount: Int
+    stargazers: StargazerConnection
+    viewerHasStarred: Boolean
+    typename__: Optional[Literal['Starrable']] = Field('Starrable', alias='__typename')
+
+
+class Subscribable(BaseModel):
+    """
+    Entities that can be subscribed to for web and email notifications.
+    """
+
+    id: ID
+    viewerCanSubscribe: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    typename__: Optional[Literal['Subscribable']] = Field(
+        'Subscribable', alias='__typename'
+    )
+
+
+class SubscribableThread(BaseModel):
+    """
+    Entities that can be subscribed to for web and email notifications.
+    """
+
+    id: ID
+    viewerThreadSubscriptionFormAction: Optional[ThreadSubscriptionFormAction] = None
+    viewerThreadSubscriptionStatus: Optional[ThreadSubscriptionState] = None
+    typename__: Optional[Literal['SubscribableThread']] = Field(
+        'SubscribableThread', alias='__typename'
+    )
+
+
+class TeamAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry with action team.*
+    """
+
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamAuditEntryData']] = Field(
+        'TeamAuditEntryData', alias='__typename'
+    )
+
+
+class TopicAuditEntryData(BaseModel):
+    """
+    Metadata for an audit entry with a topic.
+    """
+
+    topic: Optional[Topic] = None
+    topicName: Optional[String] = None
+    typename__: Optional[Literal['TopicAuditEntryData']] = Field(
+        'TopicAuditEntryData', alias='__typename'
+    )
+
+
+class UniformResourceLocatable(BaseModel):
+    """
+    Represents a type that can be retrieved by a URL.
+    """
+
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['UniformResourceLocatable']] = Field(
+        'UniformResourceLocatable', alias='__typename'
+    )
+
+
+class Updatable(BaseModel):
+    """
+    Entities that can be updated.
+    """
+
+    viewerCanUpdate: Boolean
+    typename__: Optional[Literal['Updatable']] = Field('Updatable', alias='__typename')
+
+
+class UpdatableComment(BaseModel):
+    """
+    Comments that can be updated.
+    """
+
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    typename__: Optional[Literal['UpdatableComment']] = Field(
+        'UpdatableComment', alias='__typename'
+    )
+
+
+class Votable(BaseModel):
+    """
+    A subject that may be upvoted.
+    """
+
+    upvoteCount: Int
+    viewerCanUpvote: Boolean
+    viewerHasUpvoted: Boolean
+    typename__: Optional[Literal['Votable']] = Field('Votable', alias='__typename')
+
+
+class AbortQueuedMigrationsPayload(BaseModel):
+    """
+    Autogenerated return type of AbortQueuedMigrations
+    """
+
+    clientMutationId: Optional[String] = None
+    success: Optional[Boolean] = None
+    typename__: Optional[Literal['AbortQueuedMigrationsPayload']] = Field(
+        'AbortQueuedMigrationsPayload', alias='__typename'
+    )
+
+
+class AbortRepositoryMigrationPayload(BaseModel):
+    """
+    Autogenerated return type of AbortRepositoryMigration
+    """
+
+    clientMutationId: Optional[String] = None
+    success: Optional[Boolean] = None
+    typename__: Optional[Literal['AbortRepositoryMigrationPayload']] = Field(
+        'AbortRepositoryMigrationPayload', alias='__typename'
+    )
+
+
+class AcceptEnterpriseAdministratorInvitationPayload(BaseModel):
+    """
+    Autogenerated return type of AcceptEnterpriseAdministratorInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    invitation: Optional[EnterpriseAdministratorInvitation] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['AcceptEnterpriseAdministratorInvitationPayload']
+    ] = Field('AcceptEnterpriseAdministratorInvitationPayload', alias='__typename')
+
+
+class AcceptTopicSuggestionPayload(BaseModel):
+    """
+    Autogenerated return type of AcceptTopicSuggestion
+    """
+
+    clientMutationId: Optional[String] = None
+    topic: Optional[Topic] = None
+    typename__: Optional[Literal['AcceptTopicSuggestionPayload']] = Field(
+        'AcceptTopicSuggestionPayload', alias='__typename'
+    )
+
+
+class ActorLocation(BaseModel):
+    """
+    Location information for an actor
+    """
+
+    city: Optional[String] = None
+    country: Optional[String] = None
+    countryCode: Optional[String] = None
+    region: Optional[String] = None
+    regionCode: Optional[String] = None
+    typename__: Optional[Literal['ActorLocation']] = Field(
+        'ActorLocation', alias='__typename'
+    )
+
+
+class AddAssigneesToAssignablePayload(BaseModel):
+    """
+    Autogenerated return type of AddAssigneesToAssignable
+    """
+
+    assignable: Optional[Assignable] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['AddAssigneesToAssignablePayload']] = Field(
+        'AddAssigneesToAssignablePayload', alias='__typename'
+    )
+
+
+class AddCommentPayload(BaseModel):
+    """
+    Autogenerated return type of AddComment
+    """
+
+    clientMutationId: Optional[String] = None
+    commentEdge: Optional[IssueCommentEdge] = None
+    subject: Optional[Node] = None
+    timelineEdge: Optional[IssueTimelineItemEdge] = None
+    typename__: Optional[Literal['AddCommentPayload']] = Field(
+        'AddCommentPayload', alias='__typename'
+    )
+
+
+class AddDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of AddDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[DiscussionComment] = None
+    typename__: Optional[Literal['AddDiscussionCommentPayload']] = Field(
+        'AddDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class AddDiscussionPollVotePayload(BaseModel):
+    """
+    Autogenerated return type of AddDiscussionPollVote
+    """
+
+    clientMutationId: Optional[String] = None
+    pollOption: Optional[DiscussionPollOption] = None
+    typename__: Optional[Literal['AddDiscussionPollVotePayload']] = Field(
+        'AddDiscussionPollVotePayload', alias='__typename'
+    )
+
+
+class AddEnterpriseOrganizationMemberPayload(BaseModel):
+    """
+    Autogenerated return type of AddEnterpriseOrganizationMember
+    """
+
+    clientMutationId: Optional[String] = None
+    users: Optional[List[User]] = Field(default_factory=list)
+    typename__: Optional[Literal['AddEnterpriseOrganizationMemberPayload']] = Field(
+        'AddEnterpriseOrganizationMemberPayload', alias='__typename'
+    )
+
+
+class AddEnterpriseSupportEntitlementPayload(BaseModel):
+    """
+    Autogenerated return type of AddEnterpriseSupportEntitlement
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    typename__: Optional[Literal['AddEnterpriseSupportEntitlementPayload']] = Field(
+        'AddEnterpriseSupportEntitlementPayload', alias='__typename'
+    )
+
+
+class AddLabelsToLabelablePayload(BaseModel):
+    """
+    Autogenerated return type of AddLabelsToLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelable: Optional[Labelable] = None
+    typename__: Optional[Literal['AddLabelsToLabelablePayload']] = Field(
+        'AddLabelsToLabelablePayload', alias='__typename'
+    )
+
+
+class AddProjectCardPayload(BaseModel):
+    """
+    Autogenerated return type of AddProjectCard
+    """
+
+    cardEdge: Optional[ProjectCardEdge] = None
+    clientMutationId: Optional[String] = None
+    projectColumn: Optional[ProjectColumn] = None
+    typename__: Optional[Literal['AddProjectCardPayload']] = Field(
+        'AddProjectCardPayload', alias='__typename'
+    )
+
+
+class AddProjectColumnPayload(BaseModel):
+    """
+    Autogenerated return type of AddProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    columnEdge: Optional[ProjectColumnEdge] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['AddProjectColumnPayload']] = Field(
+        'AddProjectColumnPayload', alias='__typename'
+    )
+
+
+class AddProjectV2DraftIssuePayload(BaseModel):
+    """
+    Autogenerated return type of AddProjectV2DraftIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    projectItem: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['AddProjectV2DraftIssuePayload']] = Field(
+        'AddProjectV2DraftIssuePayload', alias='__typename'
+    )
+
+
+class AddProjectV2ItemByIdPayload(BaseModel):
+    """
+    Autogenerated return type of AddProjectV2ItemById
+    """
+
+    clientMutationId: Optional[String] = None
+    item: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['AddProjectV2ItemByIdPayload']] = Field(
+        'AddProjectV2ItemByIdPayload', alias='__typename'
+    )
+
+
+class AddPullRequestReviewCommentPayload(BaseModel):
+    """
+    Autogenerated return type of AddPullRequestReviewComment
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[PullRequestReviewComment] = None
+    commentEdge: Optional[PullRequestReviewCommentEdge] = None
+    typename__: Optional[Literal['AddPullRequestReviewCommentPayload']] = Field(
+        'AddPullRequestReviewCommentPayload', alias='__typename'
+    )
+
+
+class AddPullRequestReviewPayload(BaseModel):
+    """
+    Autogenerated return type of AddPullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    reviewEdge: Optional[PullRequestReviewEdge] = None
+    typename__: Optional[Literal['AddPullRequestReviewPayload']] = Field(
+        'AddPullRequestReviewPayload', alias='__typename'
+    )
+
+
+class AddPullRequestReviewThreadPayload(BaseModel):
+    """
+    Autogenerated return type of AddPullRequestReviewThread
+    """
+
+    clientMutationId: Optional[String] = None
+    thread: Optional[PullRequestReviewThread] = None
+    typename__: Optional[Literal['AddPullRequestReviewThreadPayload']] = Field(
+        'AddPullRequestReviewThreadPayload', alias='__typename'
+    )
+
+
+class AddPullRequestReviewThreadReplyPayload(BaseModel):
+    """
+    Autogenerated return type of AddPullRequestReviewThreadReply
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[PullRequestReviewComment] = None
+    typename__: Optional[Literal['AddPullRequestReviewThreadReplyPayload']] = Field(
+        'AddPullRequestReviewThreadReplyPayload', alias='__typename'
+    )
+
+
+class AddReactionPayload(BaseModel):
+    """
+    Autogenerated return type of AddReaction
+    """
+
+    clientMutationId: Optional[String] = None
+    reaction: Optional[Reaction] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    subject: Optional[Reactable] = None
+    typename__: Optional[Literal['AddReactionPayload']] = Field(
+        'AddReactionPayload', alias='__typename'
+    )
+
+
+class AddStarPayload(BaseModel):
+    """
+    Autogenerated return type of AddStar
+    """
+
+    clientMutationId: Optional[String] = None
+    starrable: Optional[Starrable] = None
+    typename__: Optional[Literal['AddStarPayload']] = Field(
+        'AddStarPayload', alias='__typename'
+    )
+
+
+class AddUpvotePayload(BaseModel):
+    """
+    Autogenerated return type of AddUpvote
+    """
+
+    clientMutationId: Optional[String] = None
+    subject: Optional[Votable] = None
+    typename__: Optional[Literal['AddUpvotePayload']] = Field(
+        'AddUpvotePayload', alias='__typename'
+    )
+
+
+class AddVerifiableDomainPayload(BaseModel):
+    """
+    Autogenerated return type of AddVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    domain: Optional[VerifiableDomain] = None
+    typename__: Optional[Literal['AddVerifiableDomainPayload']] = Field(
+        'AddVerifiableDomainPayload', alias='__typename'
+    )
+
+
+class AddedToMergeQueueEvent(Node):
+    """
+    Represents an 'added_to_merge_queue' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    enqueuer: Optional[User] = None
+    id: ID
+    mergeQueue: Optional[MergeQueue] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['AddedToMergeQueueEvent']] = Field(
+        'AddedToMergeQueueEvent', alias='__typename'
+    )
+
+
+class AddedToProjectEvent(Node):
+    """
+    Represents a 'added_to_project' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    project: Optional[Project] = None
+    projectCard: Optional[ProjectCard] = None
+    projectColumnName: String
+    typename__: Optional[Literal['AddedToProjectEvent']] = Field(
+        'AddedToProjectEvent', alias='__typename'
+    )
+
+
+class App(Node):
+    """
+    A GitHub App.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    id: ID
+    ipAllowListEntries: IpAllowListEntryConnection
+    logoBackgroundColor: String
+    logoUrl: URI
+    name: String
+    slug: String
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['App']] = Field('App', alias='__typename')
+
+
+class ApproveDeploymentsPayload(BaseModel):
+    """
+    Autogenerated return type of ApproveDeployments
+    """
+
+    clientMutationId: Optional[String] = None
+    deployments: Optional[List[Deployment]] = Field(default_factory=list)
+    typename__: Optional[Literal['ApproveDeploymentsPayload']] = Field(
+        'ApproveDeploymentsPayload', alias='__typename'
+    )
+
+
+class ApproveVerifiableDomainPayload(BaseModel):
+    """
+    Autogenerated return type of ApproveVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    domain: Optional[VerifiableDomain] = None
+    typename__: Optional[Literal['ApproveVerifiableDomainPayload']] = Field(
+        'ApproveVerifiableDomainPayload', alias='__typename'
+    )
+
+
+class ArchiveProjectV2ItemPayload(BaseModel):
+    """
+    Autogenerated return type of ArchiveProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    item: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['ArchiveProjectV2ItemPayload']] = Field(
+        'ArchiveProjectV2ItemPayload', alias='__typename'
+    )
+
+
+class ArchiveRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of ArchiveRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['ArchiveRepositoryPayload']] = Field(
+        'ArchiveRepositoryPayload', alias='__typename'
+    )
+
+
+class AssignedEvent(Node):
+    """
+    Represents an 'assigned' event on any assignable object.
+    """
+
+    actor: Optional[Actor] = None
+    assignable: Assignable
+    assignee: Optional[Assignee] = None
+    createdAt: DateTime
+    id: ID
+    user: Optional[User] = None
+    typename__: Optional[Literal['AssignedEvent']] = Field(
+        'AssignedEvent', alias='__typename'
+    )
+
+
+class AutoMergeDisabledEvent(Node):
+    """
+    Represents a 'auto_merge_disabled' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    disabler: Optional[User] = None
+    id: ID
+    pullRequest: Optional[PullRequest] = None
+    reason: Optional[String] = None
+    reasonCode: Optional[String] = None
+    typename__: Optional[Literal['AutoMergeDisabledEvent']] = Field(
+        'AutoMergeDisabledEvent', alias='__typename'
+    )
+
+
+class AutoMergeEnabledEvent(Node):
+    """
+    Represents a 'auto_merge_enabled' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    enabler: Optional[User] = None
+    id: ID
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['AutoMergeEnabledEvent']] = Field(
+        'AutoMergeEnabledEvent', alias='__typename'
+    )
+
+
+class AutoMergeRequest(BaseModel):
+    """
+    Represents an auto-merge request for a pull request
+    """
+
+    authorEmail: Optional[String] = None
+    commitBody: Optional[String] = None
+    commitHeadline: Optional[String] = None
+    enabledAt: Optional[DateTime] = None
+    enabledBy: Optional[Actor] = None
+    mergeMethod: PullRequestMergeMethod
+    pullRequest: PullRequest
+    typename__: Optional[Literal['AutoMergeRequest']] = Field(
+        'AutoMergeRequest', alias='__typename'
+    )
+
+
+class AutoRebaseEnabledEvent(Node):
+    """
+    Represents a 'auto_rebase_enabled' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    enabler: Optional[User] = None
+    id: ID
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['AutoRebaseEnabledEvent']] = Field(
+        'AutoRebaseEnabledEvent', alias='__typename'
+    )
+
+
+class AutoSquashEnabledEvent(Node):
+    """
+    Represents a 'auto_squash_enabled' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    enabler: Optional[User] = None
+    id: ID
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['AutoSquashEnabledEvent']] = Field(
+        'AutoSquashEnabledEvent', alias='__typename'
+    )
+
+
+class AutomaticBaseChangeFailedEvent(Node):
+    """
+    Represents a 'automatic_base_change_failed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    newBase: String
+    oldBase: String
+    pullRequest: PullRequest
+    typename__: Optional[Literal['AutomaticBaseChangeFailedEvent']] = Field(
+        'AutomaticBaseChangeFailedEvent', alias='__typename'
+    )
+
+
+class AutomaticBaseChangeSucceededEvent(Node):
+    """
+    Represents a 'automatic_base_change_succeeded' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    newBase: String
+    oldBase: String
+    pullRequest: PullRequest
+    typename__: Optional[Literal['AutomaticBaseChangeSucceededEvent']] = Field(
+        'AutomaticBaseChangeSucceededEvent', alias='__typename'
+    )
+
+
+class BaseRefChangedEvent(Node):
+    """
+    Represents a 'base_ref_changed' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    currentRefName: String
+    databaseId: Optional[Int] = None
+    id: ID
+    previousRefName: String
+    pullRequest: PullRequest
+    typename__: Optional[Literal['BaseRefChangedEvent']] = Field(
+        'BaseRefChangedEvent', alias='__typename'
+    )
+
+
+class BaseRefDeletedEvent(Node):
+    """
+    Represents a 'base_ref_deleted' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    baseRefName: Optional[String] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['BaseRefDeletedEvent']] = Field(
+        'BaseRefDeletedEvent', alias='__typename'
+    )
+
+
+class BaseRefForcePushedEvent(Node):
+    """
+    Represents a 'base_ref_force_pushed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    afterCommit: Optional[Commit] = None
+    beforeCommit: Optional[Commit] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['BaseRefForcePushedEvent']] = Field(
+        'BaseRefForcePushedEvent', alias='__typename'
+    )
+
+
+class Blame(BaseModel):
+    """
+    Represents a Git blame.
+    """
+
+    ranges: List[BlameRange]
+    typename__: Optional[Literal['Blame']] = Field('Blame', alias='__typename')
+
+
+class BlameRange(BaseModel):
+    """
+    Represents a range of information from a Git blame.
+    """
+
+    age: Int
+    commit: Commit
+    endingLine: Int
+    startingLine: Int
+    typename__: Optional[Literal['BlameRange']] = Field(
+        'BlameRange', alias='__typename'
+    )
+
+
+class Blob(GitObject, Node):
+    """
+    Represents a Git blob.
+    """
+
+    abbreviatedOid: String
+    byteSize: Int
+    commitResourcePath: URI
+    commitUrl: URI
+    id: ID
+    isBinary: Optional[Boolean] = None
+    isTruncated: Boolean
+    oid: GitObjectID
+    repository: Repository
+    text: Optional[String] = None
+    typename__: Optional[Literal['Blob']] = Field('Blob', alias='__typename')
+
+
+class Bot(Actor, Node, UniformResourceLocatable):
+    """
+    A special type of user which takes actions on behalf of GitHub Apps.
+    """
+
+    avatarUrl: URI
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    login: String
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['Bot']] = Field('Bot', alias='__typename')
+
+
+class BranchNamePatternParameters(BaseModel):
+    """
+    Parameters to be used for the branch_name_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Boolean
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['BranchNamePatternParameters']] = Field(
+        'BranchNamePatternParameters', alias='__typename'
+    )
+
+
+class BranchProtectionRule(Node):
+    """
+    A branch protection rule.
+    """
+
+    allowsDeletions: Boolean
+    allowsForcePushes: Boolean
+    blocksCreations: Boolean
+    branchProtectionRuleConflicts: BranchProtectionRuleConflictConnection
+    bypassForcePushAllowances: BypassForcePushAllowanceConnection
+    bypassPullRequestAllowances: BypassPullRequestAllowanceConnection
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    dismissesStaleReviews: Boolean
+    id: ID
+    isAdminEnforced: Boolean
+    lockAllowsFetchAndMerge: Boolean
+    lockBranch: Boolean
+    matchingRefs: RefConnection
+    pattern: String
+    pushAllowances: PushAllowanceConnection
+    repository: Optional[Repository] = None
+    requireLastPushApproval: Boolean
+    requiredApprovingReviewCount: Optional[Int] = None
+    requiredDeploymentEnvironments: Optional[List[Optional[String]]] = Field(
+        default_factory=list
+    )
+    requiredStatusCheckContexts: Optional[List[Optional[String]]] = Field(
+        default_factory=list
+    )
+    requiredStatusChecks: Optional[List[RequiredStatusCheckDescription]] = Field(
+        default_factory=list
+    )
+    requiresApprovingReviews: Boolean
+    requiresCodeOwnerReviews: Boolean
+    requiresCommitSignatures: Boolean
+    requiresConversationResolution: Boolean
+    requiresDeployments: Boolean
+    requiresLinearHistory: Boolean
+    requiresStatusChecks: Boolean
+    requiresStrictStatusChecks: Boolean
+    restrictsPushes: Boolean
+    restrictsReviewDismissals: Boolean
+    reviewDismissalAllowances: ReviewDismissalAllowanceConnection
+    typename__: Optional[Literal['BranchProtectionRule']] = Field(
+        'BranchProtectionRule', alias='__typename'
+    )
+
+
+class BranchProtectionRuleConflict(BaseModel):
+    """
+    A conflict between two branch protection rules.
+    """
+
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    conflictingBranchProtectionRule: Optional[BranchProtectionRule] = None
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['BranchProtectionRuleConflict']] = Field(
+        'BranchProtectionRuleConflict', alias='__typename'
+    )
+
+
+class BranchProtectionRuleConflictConnection(BaseModel):
+    """
+    The connection type for BranchProtectionRuleConflict.
+    """
+
+    edges: Optional[List[Optional[BranchProtectionRuleConflictEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[BranchProtectionRuleConflict]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['BranchProtectionRuleConflictConnection']] = Field(
+        'BranchProtectionRuleConflictConnection', alias='__typename'
+    )
+
+
+class BranchProtectionRuleConflictEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[BranchProtectionRuleConflict] = None
+    typename__: Optional[Literal['BranchProtectionRuleConflictEdge']] = Field(
+        'BranchProtectionRuleConflictEdge', alias='__typename'
+    )
+
+
+class BranchProtectionRuleConnection(BaseModel):
+    """
+    The connection type for BranchProtectionRule.
+    """
+
+    edges: Optional[List[Optional[BranchProtectionRuleEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[BranchProtectionRule]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['BranchProtectionRuleConnection']] = Field(
+        'BranchProtectionRuleConnection', alias='__typename'
+    )
+
+
+class BranchProtectionRuleEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[BranchProtectionRule] = None
+    typename__: Optional[Literal['BranchProtectionRuleEdge']] = Field(
+        'BranchProtectionRuleEdge', alias='__typename'
+    )
+
+
+class BypassForcePushAllowance(Node):
+    """
+    A user, team, or app who has the ability to bypass a force push requirement on a protected branch.
+    """
+
+    actor: Optional[BranchActorAllowanceActor] = None
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    id: ID
+    typename__: Optional[Literal['BypassForcePushAllowance']] = Field(
+        'BypassForcePushAllowance', alias='__typename'
+    )
+
+
+class BypassForcePushAllowanceConnection(BaseModel):
+    """
+    The connection type for BypassForcePushAllowance.
+    """
+
+    edges: Optional[List[Optional[BypassForcePushAllowanceEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[BypassForcePushAllowance]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['BypassForcePushAllowanceConnection']] = Field(
+        'BypassForcePushAllowanceConnection', alias='__typename'
+    )
+
+
+class BypassForcePushAllowanceEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[BypassForcePushAllowance] = None
+    typename__: Optional[Literal['BypassForcePushAllowanceEdge']] = Field(
+        'BypassForcePushAllowanceEdge', alias='__typename'
+    )
+
+
+class BypassPullRequestAllowance(Node):
+    """
+    A user, team, or app who has the ability to bypass a pull request requirement on a protected branch.
+    """
+
+    actor: Optional[BranchActorAllowanceActor] = None
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    id: ID
+    typename__: Optional[Literal['BypassPullRequestAllowance']] = Field(
+        'BypassPullRequestAllowance', alias='__typename'
+    )
+
+
+class BypassPullRequestAllowanceConnection(BaseModel):
+    """
+    The connection type for BypassPullRequestAllowance.
+    """
+
+    edges: Optional[List[Optional[BypassPullRequestAllowanceEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[BypassPullRequestAllowance]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['BypassPullRequestAllowanceConnection']] = Field(
+        'BypassPullRequestAllowanceConnection', alias='__typename'
+    )
+
+
+class BypassPullRequestAllowanceEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[BypassPullRequestAllowance] = None
+    typename__: Optional[Literal['BypassPullRequestAllowanceEdge']] = Field(
+        'BypassPullRequestAllowanceEdge', alias='__typename'
+    )
+
+
+class CVSS(BaseModel):
+    """
+    The Common Vulnerability Scoring System
+    """
+
+    score: Float
+    vectorString: Optional[String] = None
+    typename__: Optional[Literal['CVSS']] = Field('CVSS', alias='__typename')
+
+
+class CWE(Node):
+    """
+    A common weakness enumeration
+    """
+
+    cweId: String
+    description: String
+    id: ID
+    name: String
+    typename__: Optional[Literal['CWE']] = Field('CWE', alias='__typename')
+
+
+class CWEConnection(BaseModel):
+    """
+    The connection type for CWE.
+    """
+
+    edges: Optional[List[Optional[CWEEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CWE]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CWEConnection']] = Field(
+        'CWEConnection', alias='__typename'
+    )
+
+
+class CWEEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CWE] = None
+    typename__: Optional[Literal['CWEEdge']] = Field('CWEEdge', alias='__typename')
+
+
+class CancelEnterpriseAdminInvitationPayload(BaseModel):
+    """
+    Autogenerated return type of CancelEnterpriseAdminInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    invitation: Optional[EnterpriseAdministratorInvitation] = None
+    message: Optional[String] = None
+    typename__: Optional[Literal['CancelEnterpriseAdminInvitationPayload']] = Field(
+        'CancelEnterpriseAdminInvitationPayload', alias='__typename'
+    )
+
+
+class CancelSponsorshipPayload(BaseModel):
+    """
+    Autogenerated return type of CancelSponsorship
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsTier: Optional[SponsorsTier] = None
+    typename__: Optional[Literal['CancelSponsorshipPayload']] = Field(
+        'CancelSponsorshipPayload', alias='__typename'
+    )
+
+
+class ChangeUserStatusPayload(BaseModel):
+    """
+    Autogenerated return type of ChangeUserStatus
+    """
+
+    clientMutationId: Optional[String] = None
+    status: Optional[UserStatus] = None
+    typename__: Optional[Literal['ChangeUserStatusPayload']] = Field(
+        'ChangeUserStatusPayload', alias='__typename'
+    )
+
+
+class CheckAnnotation(BaseModel):
+    """
+    A single check annotation.
+    """
+
+    annotationLevel: Optional[CheckAnnotationLevel] = None
+    blobUrl: URI
+    databaseId: Optional[Int] = None
+    location: CheckAnnotationSpan
+    message: String
+    path: String
+    rawDetails: Optional[String] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['CheckAnnotation']] = Field(
+        'CheckAnnotation', alias='__typename'
+    )
+
+
+class CheckAnnotationConnection(BaseModel):
+    """
+    The connection type for CheckAnnotation.
+    """
+
+    edges: Optional[List[Optional[CheckAnnotationEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CheckAnnotation]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CheckAnnotationConnection']] = Field(
+        'CheckAnnotationConnection', alias='__typename'
+    )
+
+
+class CheckAnnotationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CheckAnnotation] = None
+    typename__: Optional[Literal['CheckAnnotationEdge']] = Field(
+        'CheckAnnotationEdge', alias='__typename'
+    )
+
+
+class CheckAnnotationPosition(BaseModel):
+    """
+    A character position in a check annotation.
+    """
+
+    column: Optional[Int] = None
+    line: Int
+    typename__: Optional[Literal['CheckAnnotationPosition']] = Field(
+        'CheckAnnotationPosition', alias='__typename'
+    )
+
+
+class CheckAnnotationSpan(BaseModel):
+    """
+    An inclusive pair of positions for a check annotation.
+    """
+
+    end: CheckAnnotationPosition
+    start: CheckAnnotationPosition
+    typename__: Optional[Literal['CheckAnnotationSpan']] = Field(
+        'CheckAnnotationSpan', alias='__typename'
+    )
+
+
+class CheckRun(Node, RequirableByPullRequest, UniformResourceLocatable):
+    """
+    A check run.
+    """
+
+    annotations: Optional[CheckAnnotationConnection] = None
+    checkSuite: CheckSuite
+    completedAt: Optional[DateTime] = None
+    conclusion: Optional[CheckConclusionState] = None
+    databaseId: Optional[Int] = None
+    deployment: Optional[Deployment] = None
+    detailsUrl: Optional[URI] = None
+    externalId: Optional[String] = None
+    id: ID
+    isRequired: Boolean
+    name: String
+    pendingDeploymentRequest: Optional[DeploymentRequest] = None
+    permalink: URI
+    repository: Repository
+    resourcePath: URI
+    startedAt: Optional[DateTime] = None
+    status: CheckStatusState
+    steps: Optional[CheckStepConnection] = None
+    summary: Optional[String] = None
+    text: Optional[String] = None
+    title: Optional[String] = None
+    url: URI
+    typename__: Optional[Literal['CheckRun']] = Field('CheckRun', alias='__typename')
+
+
+class CheckRunConnection(BaseModel):
+    """
+    The connection type for CheckRun.
+    """
+
+    edges: Optional[List[Optional[CheckRunEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CheckRun]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CheckRunConnection']] = Field(
+        'CheckRunConnection', alias='__typename'
+    )
+
+
+class CheckRunEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CheckRun] = None
+    typename__: Optional[Literal['CheckRunEdge']] = Field(
+        'CheckRunEdge', alias='__typename'
+    )
+
+
+class CheckRunStateCount(BaseModel):
+    """
+    Represents a count of the state of a check run.
+    """
+
+    count: Int
+    state: CheckRunState
+    typename__: Optional[Literal['CheckRunStateCount']] = Field(
+        'CheckRunStateCount', alias='__typename'
+    )
+
+
+class CheckStep(BaseModel):
+    """
+    A single check step.
+    """
+
+    completedAt: Optional[DateTime] = None
+    conclusion: Optional[CheckConclusionState] = None
+    externalId: Optional[String] = None
+    name: String
+    number: Int
+    secondsToCompletion: Optional[Int] = None
+    startedAt: Optional[DateTime] = None
+    status: CheckStatusState
+    typename__: Optional[Literal['CheckStep']] = Field('CheckStep', alias='__typename')
+
+
+class CheckStepConnection(BaseModel):
+    """
+    The connection type for CheckStep.
+    """
+
+    edges: Optional[List[Optional[CheckStepEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CheckStep]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CheckStepConnection']] = Field(
+        'CheckStepConnection', alias='__typename'
+    )
+
+
+class CheckStepEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CheckStep] = None
+    typename__: Optional[Literal['CheckStepEdge']] = Field(
+        'CheckStepEdge', alias='__typename'
+    )
+
+
+class CheckSuite(Node):
+    """
+    A check suite.
+    """
+
+    app: Optional[App] = None
+    branch: Optional[Ref] = None
+    checkRuns: Optional[CheckRunConnection] = None
+    commit: Commit
+    conclusion: Optional[CheckConclusionState] = None
+    createdAt: DateTime
+    creator: Optional[User] = None
+    databaseId: Optional[Int] = None
+    id: ID
+    matchingPullRequests: Optional[PullRequestConnection] = None
+    push: Optional[Push] = None
+    repository: Repository
+    resourcePath: URI
+    status: CheckStatusState
+    updatedAt: DateTime
+    url: URI
+    workflowRun: Optional[WorkflowRun] = None
+    typename__: Optional[Literal['CheckSuite']] = Field(
+        'CheckSuite', alias='__typename'
+    )
+
+
+class CheckSuiteConnection(BaseModel):
+    """
+    The connection type for CheckSuite.
+    """
+
+    edges: Optional[List[Optional[CheckSuiteEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CheckSuite]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CheckSuiteConnection']] = Field(
+        'CheckSuiteConnection', alias='__typename'
+    )
+
+
+class CheckSuiteEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CheckSuite] = None
+    typename__: Optional[Literal['CheckSuiteEdge']] = Field(
+        'CheckSuiteEdge', alias='__typename'
+    )
+
+
+class ClearLabelsFromLabelablePayload(BaseModel):
+    """
+    Autogenerated return type of ClearLabelsFromLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelable: Optional[Labelable] = None
+    typename__: Optional[Literal['ClearLabelsFromLabelablePayload']] = Field(
+        'ClearLabelsFromLabelablePayload', alias='__typename'
+    )
+
+
+class ClearProjectV2ItemFieldValuePayload(BaseModel):
+    """
+    Autogenerated return type of ClearProjectV2ItemFieldValue
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2Item: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['ClearProjectV2ItemFieldValuePayload']] = Field(
+        'ClearProjectV2ItemFieldValuePayload', alias='__typename'
+    )
+
+
+class CloneProjectPayload(BaseModel):
+    """
+    Autogenerated return type of CloneProject
+    """
+
+    clientMutationId: Optional[String] = None
+    jobStatusId: Optional[String] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['CloneProjectPayload']] = Field(
+        'CloneProjectPayload', alias='__typename'
+    )
+
+
+class CloneTemplateRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of CloneTemplateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['CloneTemplateRepositoryPayload']] = Field(
+        'CloneTemplateRepositoryPayload', alias='__typename'
+    )
+
+
+class CloseDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of CloseDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['CloseDiscussionPayload']] = Field(
+        'CloseDiscussionPayload', alias='__typename'
+    )
+
+
+class CloseIssuePayload(BaseModel):
+    """
+    Autogenerated return type of CloseIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['CloseIssuePayload']] = Field(
+        'CloseIssuePayload', alias='__typename'
+    )
+
+
+class ClosePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of ClosePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['ClosePullRequestPayload']] = Field(
+        'ClosePullRequestPayload', alias='__typename'
+    )
+
+
+class ClosedEvent(Node, UniformResourceLocatable):
+    """
+    Represents a 'closed' event on any `Closable`.
+    """
+
+    actor: Optional[Actor] = None
+    closable: Closable
+    closer: Optional[Closer] = None
+    createdAt: DateTime
+    id: ID
+    resourcePath: URI
+    stateReason: Optional[IssueStateReason] = None
+    url: URI
+    typename__: Optional[Literal['ClosedEvent']] = Field(
+        'ClosedEvent', alias='__typename'
+    )
+
+
+class CodeOfConduct(Node):
+    """
+    The Code of Conduct for a repository
+    """
+
+    body: Optional[String] = None
+    id: ID
+    key: String
+    name: String
+    resourcePath: Optional[URI] = None
+    url: Optional[URI] = None
+    typename__: Optional[Literal['CodeOfConduct']] = Field(
+        'CodeOfConduct', alias='__typename'
+    )
+
+
+class CommentDeletedEvent(Node):
+    """
+    Represents a 'comment_deleted' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    deletedCommentAuthor: Optional[Actor] = None
+    id: ID
+    typename__: Optional[Literal['CommentDeletedEvent']] = Field(
+        'CommentDeletedEvent', alias='__typename'
+    )
+
+
+class Commit(GitObject, Node, Subscribable, UniformResourceLocatable):
+    """
+    Represents a Git commit.
+    """
+
+    abbreviatedOid: String
+    additions: Int
+    associatedPullRequests: Optional[PullRequestConnection] = None
+    author: Optional[GitActor] = None
+    authoredByCommitter: Boolean
+    authoredDate: DateTime
+    authors: GitActorConnection
+    blame: Blame
+    changedFiles: Int
+    changedFilesIfAvailable: Optional[Int] = None
+    checkSuites: Optional[CheckSuiteConnection] = None
+    comments: CommitCommentConnection
+    commitResourcePath: URI
+    commitUrl: URI
+    committedDate: DateTime
+    committedViaWeb: Boolean
+    committer: Optional[GitActor] = None
+    deletions: Int
+    deployments: Optional[DeploymentConnection] = None
+    file: Optional[TreeEntry] = None
+    history: CommitHistoryConnection
+    id: ID
+    message: String
+    messageBody: String
+    messageBodyHTML: HTML
+    messageHeadline: String
+    messageHeadlineHTML: HTML
+    oid: GitObjectID
+    onBehalfOf: Optional[Organization] = None
+    parents: CommitConnection
+    pushedDate: Optional[DateTime] = None
+    repository: Repository
+    resourcePath: URI
+    signature: Optional[GitSignature] = None
+    status: Optional[Status] = None
+    statusCheckRollup: Optional[StatusCheckRollup] = None
+    submodules: SubmoduleConnection
+    tarballUrl: URI
+    tree: Tree
+    treeResourcePath: URI
+    treeUrl: URI
+    url: URI
+    viewerCanSubscribe: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    zipballUrl: URI
+    typename__: Optional[Literal['Commit']] = Field('Commit', alias='__typename')
+
+
+class CommitAuthorEmailPatternParameters(BaseModel):
+    """
+    Parameters to be used for the commit_author_email_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Boolean
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitAuthorEmailPatternParameters']] = Field(
+        'CommitAuthorEmailPatternParameters', alias='__typename'
+    )
+
+
+class CommitComment(
+    Comment,
+    Deletable,
+    Minimizable,
+    Node,
+    Reactable,
+    RepositoryNode,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    Represents a comment on a given Commit.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    commit: Optional[Commit] = None
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isMinimized: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    minimizedReason: Optional[String] = None
+    path: Optional[String] = None
+    position: Optional[Int] = None
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanReact: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['CommitComment']] = Field(
+        'CommitComment', alias='__typename'
+    )
+
+
+class CommitCommentConnection(BaseModel):
+    """
+    The connection type for CommitComment.
+    """
+
+    edges: Optional[List[Optional[CommitCommentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[CommitComment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CommitCommentConnection']] = Field(
+        'CommitCommentConnection', alias='__typename'
+    )
+
+
+class CommitCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CommitComment] = None
+    typename__: Optional[Literal['CommitCommentEdge']] = Field(
+        'CommitCommentEdge', alias='__typename'
+    )
+
+
+class CommitCommentThread(Node, RepositoryNode):
+    """
+    A thread of comments on a commit.
+    """
+
+    comments: CommitCommentConnection
+    commit: Optional[Commit] = None
+    id: ID
+    path: Optional[String] = None
+    position: Optional[Int] = None
+    repository: Repository
+    typename__: Optional[Literal['CommitCommentThread']] = Field(
+        'CommitCommentThread', alias='__typename'
+    )
+
+
+class CommitConnection(BaseModel):
+    """
+    The connection type for Commit.
+    """
+
+    edges: Optional[List[Optional[CommitEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Commit]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CommitConnection']] = Field(
+        'CommitConnection', alias='__typename'
+    )
+
+
+class CommitContributionsByRepository(BaseModel):
+    """
+    This aggregates commits made by a user within one repository.
+    """
+
+    contributions: CreatedCommitContributionConnection
+    repository: Repository
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['CommitContributionsByRepository']] = Field(
+        'CommitContributionsByRepository', alias='__typename'
+    )
+
+
+class CommitEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Commit] = None
+    typename__: Optional[Literal['CommitEdge']] = Field(
+        'CommitEdge', alias='__typename'
+    )
+
+
+class CommitHistoryConnection(BaseModel):
+    """
+    The connection type for Commit.
+    """
+
+    edges: Optional[List[Optional[CommitEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Commit]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CommitHistoryConnection']] = Field(
+        'CommitHistoryConnection', alias='__typename'
+    )
+
+
+class CommitMessagePatternParameters(BaseModel):
+    """
+    Parameters to be used for the commit_message_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Boolean
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitMessagePatternParameters']] = Field(
+        'CommitMessagePatternParameters', alias='__typename'
+    )
+
+
+class CommitterEmailPatternParameters(BaseModel):
+    """
+    Parameters to be used for the committer_email_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Boolean
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitterEmailPatternParameters']] = Field(
+        'CommitterEmailPatternParameters', alias='__typename'
+    )
+
+
+class Comparison(Node):
+    """
+    Represents a comparison between two commit revisions.
+    """
+
+    aheadBy: Int
+    baseTarget: GitObject
+    behindBy: Int
+    commits: ComparisonCommitConnection
+    headTarget: GitObject
+    id: ID
+    status: ComparisonStatus
+    typename__: Optional[Literal['Comparison']] = Field(
+        'Comparison', alias='__typename'
+    )
+
+
+class ComparisonCommitConnection(BaseModel):
+    """
+    The connection type for Commit.
+    """
+
+    authorCount: Int
+    edges: Optional[List[Optional[CommitEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Commit]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ComparisonCommitConnection']] = Field(
+        'ComparisonCommitConnection', alias='__typename'
+    )
+
+
+class ConnectedEvent(Node):
+    """
+    Represents a 'connected' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    isCrossRepository: Boolean
+    source: ReferencedSubject
+    subject: ReferencedSubject
+    typename__: Optional[Literal['ConnectedEvent']] = Field(
+        'ConnectedEvent', alias='__typename'
+    )
+
+
+class ContributingGuidelines(BaseModel):
+    """
+    The Contributing Guidelines for a repository.
+    """
+
+    body: Optional[String] = None
+    resourcePath: Optional[URI] = None
+    url: Optional[URI] = None
+    typename__: Optional[Literal['ContributingGuidelines']] = Field(
+        'ContributingGuidelines', alias='__typename'
+    )
+
+
+class ContributionCalendar(BaseModel):
+    """
+    A calendar of contributions made on GitHub by a user.
+    """
+
+    colors: List[String]
+    isHalloween: Boolean
+    months: List[ContributionCalendarMonth]
+    totalContributions: Int
+    weeks: List[ContributionCalendarWeek]
+    typename__: Optional[Literal['ContributionCalendar']] = Field(
+        'ContributionCalendar', alias='__typename'
+    )
+
+
+class ContributionCalendarDay(BaseModel):
+    """
+    Represents a single day of contributions on GitHub by a user.
+    """
+
+    color: String
+    contributionCount: Int
+    contributionLevel: ContributionLevel
+    date: Date
+    weekday: Int
+    typename__: Optional[Literal['ContributionCalendarDay']] = Field(
+        'ContributionCalendarDay', alias='__typename'
+    )
+
+
+class ContributionCalendarMonth(BaseModel):
+    """
+    A month of contributions in a user's contribution graph.
+    """
+
+    firstDay: Date
+    name: String
+    totalWeeks: Int
+    year: Int
+    typename__: Optional[Literal['ContributionCalendarMonth']] = Field(
+        'ContributionCalendarMonth', alias='__typename'
+    )
+
+
+class ContributionCalendarWeek(BaseModel):
+    """
+    A week of contributions in a user's contribution graph.
+    """
+
+    contributionDays: List[ContributionCalendarDay]
+    firstDay: Date
+    typename__: Optional[Literal['ContributionCalendarWeek']] = Field(
+        'ContributionCalendarWeek', alias='__typename'
+    )
+
+
+class ContributionsCollection(BaseModel):
+    """
+    A contributions collection aggregates contributions such as opened issues and commits created by a user.
+    """
+
+    commitContributionsByRepository: List[CommitContributionsByRepository]
+    contributionCalendar: ContributionCalendar
+    contributionYears: List[Int]
+    doesEndInCurrentMonth: Boolean
+    earliestRestrictedContributionDate: Optional[Date] = None
+    endedAt: DateTime
+    firstIssueContribution: Optional[CreatedIssueOrRestrictedContribution] = None
+    firstPullRequestContribution: Optional[
+        CreatedPullRequestOrRestrictedContribution
+    ] = None
+    firstRepositoryContribution: Optional[
+        CreatedRepositoryOrRestrictedContribution
+    ] = None
+    hasActivityInThePast: Boolean
+    hasAnyContributions: Boolean
+    hasAnyRestrictedContributions: Boolean
+    isSingleDay: Boolean
+    issueContributions: CreatedIssueContributionConnection
+    issueContributionsByRepository: List[IssueContributionsByRepository]
+    joinedGitHubContribution: Optional[JoinedGitHubContribution] = None
+    latestRestrictedContributionDate: Optional[Date] = None
+    mostRecentCollectionWithActivity: Optional[ContributionsCollection] = None
+    mostRecentCollectionWithoutActivity: Optional[ContributionsCollection] = None
+    popularIssueContribution: Optional[CreatedIssueContribution] = None
+    popularPullRequestContribution: Optional[CreatedPullRequestContribution] = None
+    pullRequestContributions: CreatedPullRequestContributionConnection
+    pullRequestContributionsByRepository: List[PullRequestContributionsByRepository]
+    pullRequestReviewContributions: CreatedPullRequestReviewContributionConnection
+    pullRequestReviewContributionsByRepository: List[
+        PullRequestReviewContributionsByRepository
+    ]
+    repositoryContributions: CreatedRepositoryContributionConnection
+    restrictedContributionsCount: Int
+    startedAt: DateTime
+    totalCommitContributions: Int
+    totalIssueContributions: Int
+    totalPullRequestContributions: Int
+    totalPullRequestReviewContributions: Int
+    totalRepositoriesWithContributedCommits: Int
+    totalRepositoriesWithContributedIssues: Int
+    totalRepositoriesWithContributedPullRequestReviews: Int
+    totalRepositoriesWithContributedPullRequests: Int
+    totalRepositoryContributions: Int
+    user: User
+    typename__: Optional[Literal['ContributionsCollection']] = Field(
+        'ContributionsCollection', alias='__typename'
+    )
+
+
+class ConvertProjectCardNoteToIssuePayload(BaseModel):
+    """
+    Autogenerated return type of ConvertProjectCardNoteToIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    projectCard: Optional[ProjectCard] = None
+    typename__: Optional[Literal['ConvertProjectCardNoteToIssuePayload']] = Field(
+        'ConvertProjectCardNoteToIssuePayload', alias='__typename'
+    )
+
+
+class ConvertPullRequestToDraftPayload(BaseModel):
+    """
+    Autogenerated return type of ConvertPullRequestToDraft
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['ConvertPullRequestToDraftPayload']] = Field(
+        'ConvertPullRequestToDraftPayload', alias='__typename'
+    )
+
+
+class ConvertToDraftEvent(Node, UniformResourceLocatable):
+    """
+    Represents a 'convert_to_draft' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['ConvertToDraftEvent']] = Field(
+        'ConvertToDraftEvent', alias='__typename'
+    )
+
+
+class ConvertedNoteToIssueEvent(Node):
+    """
+    Represents a 'converted_note_to_issue' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    project: Optional[Project] = None
+    projectCard: Optional[ProjectCard] = None
+    projectColumnName: String
+    typename__: Optional[Literal['ConvertedNoteToIssueEvent']] = Field(
+        'ConvertedNoteToIssueEvent', alias='__typename'
+    )
+
+
+class ConvertedToDiscussionEvent(Node):
+    """
+    Represents a 'converted_to_discussion' event on a given issue.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    discussion: Optional[Discussion] = None
+    id: ID
+    typename__: Optional[Literal['ConvertedToDiscussionEvent']] = Field(
+        'ConvertedToDiscussionEvent', alias='__typename'
+    )
+
+
+class CopyProjectV2Payload(BaseModel):
+    """
+    Autogenerated return type of CopyProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['CopyProjectV2Payload']] = Field(
+        'CopyProjectV2Payload', alias='__typename'
+    )
+
+
+class CreateAttributionInvitationPayload(BaseModel):
+    """
+    Autogenerated return type of CreateAttributionInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[Organization] = None
+    source: Optional[Claimable] = None
+    target: Optional[Claimable] = None
+    typename__: Optional[Literal['CreateAttributionInvitationPayload']] = Field(
+        'CreateAttributionInvitationPayload', alias='__typename'
+    )
+
+
+class CreateBranchProtectionRulePayload(BaseModel):
+    """
+    Autogenerated return type of CreateBranchProtectionRule
+    """
+
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['CreateBranchProtectionRulePayload']] = Field(
+        'CreateBranchProtectionRulePayload', alias='__typename'
+    )
+
+
+class CreateCheckRunPayload(BaseModel):
+    """
+    Autogenerated return type of CreateCheckRun
+    """
+
+    checkRun: Optional[CheckRun] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['CreateCheckRunPayload']] = Field(
+        'CreateCheckRunPayload', alias='__typename'
+    )
+
+
+class CreateCheckSuitePayload(BaseModel):
+    """
+    Autogenerated return type of CreateCheckSuite
+    """
+
+    checkSuite: Optional[CheckSuite] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['CreateCheckSuitePayload']] = Field(
+        'CreateCheckSuitePayload', alias='__typename'
+    )
+
+
+class CreateCommitOnBranchPayload(BaseModel):
+    """
+    Autogenerated return type of CreateCommitOnBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    commit: Optional[Commit] = None
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['CreateCommitOnBranchPayload']] = Field(
+        'CreateCommitOnBranchPayload', alias='__typename'
+    )
+
+
+class CreateDeploymentPayload(BaseModel):
+    """
+    Autogenerated return type of CreateDeployment
+    """
+
+    autoMerged: Optional[Boolean] = None
+    clientMutationId: Optional[String] = None
+    deployment: Optional[Deployment] = None
+    typename__: Optional[Literal['CreateDeploymentPayload']] = Field(
+        'CreateDeploymentPayload', alias='__typename'
+    )
+
+
+class CreateDeploymentStatusPayload(BaseModel):
+    """
+    Autogenerated return type of CreateDeploymentStatus
+    """
+
+    clientMutationId: Optional[String] = None
+    deploymentStatus: Optional[DeploymentStatus] = None
+    typename__: Optional[Literal['CreateDeploymentStatusPayload']] = Field(
+        'CreateDeploymentStatusPayload', alias='__typename'
+    )
+
+
+class CreateDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of CreateDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['CreateDiscussionPayload']] = Field(
+        'CreateDiscussionPayload', alias='__typename'
+    )
+
+
+class CreateEnterpriseOrganizationPayload(BaseModel):
+    """
+    Autogenerated return type of CreateEnterpriseOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[Literal['CreateEnterpriseOrganizationPayload']] = Field(
+        'CreateEnterpriseOrganizationPayload', alias='__typename'
+    )
+
+
+class CreateEnvironmentPayload(BaseModel):
+    """
+    Autogenerated return type of CreateEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    environment: Optional[Environment] = None
+    typename__: Optional[Literal['CreateEnvironmentPayload']] = Field(
+        'CreateEnvironmentPayload', alias='__typename'
+    )
+
+
+class CreateIpAllowListEntryPayload(BaseModel):
+    """
+    Autogenerated return type of CreateIpAllowListEntry
+    """
+
+    clientMutationId: Optional[String] = None
+    ipAllowListEntry: Optional[IpAllowListEntry] = None
+    typename__: Optional[Literal['CreateIpAllowListEntryPayload']] = Field(
+        'CreateIpAllowListEntryPayload', alias='__typename'
+    )
+
+
+class CreateIssuePayload(BaseModel):
+    """
+    Autogenerated return type of CreateIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['CreateIssuePayload']] = Field(
+        'CreateIssuePayload', alias='__typename'
+    )
+
+
+class CreateLabelPayload(BaseModel):
+    """
+    Autogenerated return type of CreateLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    label: Optional[Label] = None
+    typename__: Optional[Literal['CreateLabelPayload']] = Field(
+        'CreateLabelPayload', alias='__typename'
+    )
+
+
+class CreateLinkedBranchPayload(BaseModel):
+    """
+    Autogenerated return type of CreateLinkedBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    linkedBranch: Optional[LinkedBranch] = None
+    typename__: Optional[Literal['CreateLinkedBranchPayload']] = Field(
+        'CreateLinkedBranchPayload', alias='__typename'
+    )
+
+
+class CreateMigrationSourcePayload(BaseModel):
+    """
+    Autogenerated return type of CreateMigrationSource
+    """
+
+    clientMutationId: Optional[String] = None
+    migrationSource: Optional[MigrationSource] = None
+    typename__: Optional[Literal['CreateMigrationSourcePayload']] = Field(
+        'CreateMigrationSourcePayload', alias='__typename'
+    )
+
+
+class CreateProjectPayload(BaseModel):
+    """
+    Autogenerated return type of CreateProject
+    """
+
+    clientMutationId: Optional[String] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['CreateProjectPayload']] = Field(
+        'CreateProjectPayload', alias='__typename'
+    )
+
+
+class CreateProjectV2FieldPayload(BaseModel):
+    """
+    Autogenerated return type of CreateProjectV2Field
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2Field: Optional[ProjectV2FieldConfiguration] = None
+    typename__: Optional[Literal['CreateProjectV2FieldPayload']] = Field(
+        'CreateProjectV2FieldPayload', alias='__typename'
+    )
+
+
+class CreateProjectV2Payload(BaseModel):
+    """
+    Autogenerated return type of CreateProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['CreateProjectV2Payload']] = Field(
+        'CreateProjectV2Payload', alias='__typename'
+    )
+
+
+class CreatePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of CreatePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['CreatePullRequestPayload']] = Field(
+        'CreatePullRequestPayload', alias='__typename'
+    )
+
+
+class CreateRefPayload(BaseModel):
+    """
+    Autogenerated return type of CreateRef
+    """
+
+    clientMutationId: Optional[String] = None
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['CreateRefPayload']] = Field(
+        'CreateRefPayload', alias='__typename'
+    )
+
+
+class CreateRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of CreateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['CreateRepositoryPayload']] = Field(
+        'CreateRepositoryPayload', alias='__typename'
+    )
+
+
+class CreateRepositoryRulesetPayload(BaseModel):
+    """
+    Autogenerated return type of CreateRepositoryRuleset
+    """
+
+    clientMutationId: Optional[String] = None
+    ruleset: Optional[RepositoryRuleset] = None
+    typename__: Optional[Literal['CreateRepositoryRulesetPayload']] = Field(
+        'CreateRepositoryRulesetPayload', alias='__typename'
+    )
+
+
+class CreateSponsorsListingPayload(BaseModel):
+    """
+    Autogenerated return type of CreateSponsorsListing
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsListing: Optional[SponsorsListing] = None
+    typename__: Optional[Literal['CreateSponsorsListingPayload']] = Field(
+        'CreateSponsorsListingPayload', alias='__typename'
+    )
+
+
+class CreateSponsorsTierPayload(BaseModel):
+    """
+    Autogenerated return type of CreateSponsorsTier
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsTier: Optional[SponsorsTier] = None
+    typename__: Optional[Literal['CreateSponsorsTierPayload']] = Field(
+        'CreateSponsorsTierPayload', alias='__typename'
+    )
+
+
+class CreateSponsorshipPayload(BaseModel):
+    """
+    Autogenerated return type of CreateSponsorship
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorship: Optional[Sponsorship] = None
+    typename__: Optional[Literal['CreateSponsorshipPayload']] = Field(
+        'CreateSponsorshipPayload', alias='__typename'
+    )
+
+
+class CreateSponsorshipsPayload(BaseModel):
+    """
+    Autogenerated return type of CreateSponsorships
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorables: Optional[List[Sponsorable]] = Field(default_factory=list)
+    typename__: Optional[Literal['CreateSponsorshipsPayload']] = Field(
+        'CreateSponsorshipsPayload', alias='__typename'
+    )
+
+
+class CreateTeamDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of CreateTeamDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    teamDiscussionComment: Optional[TeamDiscussionComment] = None
+    typename__: Optional[Literal['CreateTeamDiscussionCommentPayload']] = Field(
+        'CreateTeamDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class CreateTeamDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of CreateTeamDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    teamDiscussion: Optional[TeamDiscussion] = None
+    typename__: Optional[Literal['CreateTeamDiscussionPayload']] = Field(
+        'CreateTeamDiscussionPayload', alias='__typename'
+    )
+
+
+class CreatedCommitContribution(Contribution):
+    """
+    Represents the contribution a user made by committing to a repository.
+    """
+
+    commitCount: Int
+    isRestricted: Boolean
+    occurredAt: DateTime
+    repository: Repository
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['CreatedCommitContribution']] = Field(
+        'CreatedCommitContribution', alias='__typename'
+    )
+
+
+class CreatedCommitContributionConnection(BaseModel):
+    """
+    The connection type for CreatedCommitContribution.
+    """
+
+    edges: Optional[List[Optional[CreatedCommitContributionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[CreatedCommitContribution]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CreatedCommitContributionConnection']] = Field(
+        'CreatedCommitContributionConnection', alias='__typename'
+    )
+
+
+class CreatedCommitContributionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CreatedCommitContribution] = None
+    typename__: Optional[Literal['CreatedCommitContributionEdge']] = Field(
+        'CreatedCommitContributionEdge', alias='__typename'
+    )
+
+
+class CreatedIssueContribution(Contribution):
+    """
+    Represents the contribution a user made on GitHub by opening an issue.
+    """
+
+    isRestricted: Boolean
+    issue: Issue
+    occurredAt: DateTime
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['CreatedIssueContribution']] = Field(
+        'CreatedIssueContribution', alias='__typename'
+    )
+
+
+class CreatedIssueContributionConnection(BaseModel):
+    """
+    The connection type for CreatedIssueContribution.
+    """
+
+    edges: Optional[List[Optional[CreatedIssueContributionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[CreatedIssueContribution]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CreatedIssueContributionConnection']] = Field(
+        'CreatedIssueContributionConnection', alias='__typename'
+    )
+
+
+class CreatedIssueContributionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CreatedIssueContribution] = None
+    typename__: Optional[Literal['CreatedIssueContributionEdge']] = Field(
+        'CreatedIssueContributionEdge', alias='__typename'
+    )
+
+
+class CreatedPullRequestContribution(Contribution):
+    """
+    Represents the contribution a user made on GitHub by opening a pull request.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    pullRequest: PullRequest
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['CreatedPullRequestContribution']] = Field(
+        'CreatedPullRequestContribution', alias='__typename'
+    )
+
+
+class CreatedPullRequestContributionConnection(BaseModel):
+    """
+    The connection type for CreatedPullRequestContribution.
+    """
+
+    edges: Optional[List[Optional[CreatedPullRequestContributionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[CreatedPullRequestContribution]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CreatedPullRequestContributionConnection']] = Field(
+        'CreatedPullRequestContributionConnection', alias='__typename'
+    )
+
+
+class CreatedPullRequestContributionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CreatedPullRequestContribution] = None
+    typename__: Optional[Literal['CreatedPullRequestContributionEdge']] = Field(
+        'CreatedPullRequestContributionEdge', alias='__typename'
+    )
+
+
+class CreatedPullRequestReviewContribution(Contribution):
+    """
+    Represents the contribution a user made by leaving a review on a pull request.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    pullRequest: PullRequest
+    pullRequestReview: PullRequestReview
+    repository: Repository
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['CreatedPullRequestReviewContribution']] = Field(
+        'CreatedPullRequestReviewContribution', alias='__typename'
+    )
+
+
+class CreatedPullRequestReviewContributionConnection(BaseModel):
+    """
+    The connection type for CreatedPullRequestReviewContribution.
+    """
+
+    edges: Optional[List[Optional[CreatedPullRequestReviewContributionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[CreatedPullRequestReviewContribution]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[
+        Literal['CreatedPullRequestReviewContributionConnection']
+    ] = Field('CreatedPullRequestReviewContributionConnection', alias='__typename')
+
+
+class CreatedPullRequestReviewContributionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CreatedPullRequestReviewContribution] = None
+    typename__: Optional[Literal['CreatedPullRequestReviewContributionEdge']] = Field(
+        'CreatedPullRequestReviewContributionEdge', alias='__typename'
+    )
+
+
+class CreatedRepositoryContribution(Contribution):
+    """
+    Represents the contribution a user made on GitHub by creating a repository.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    repository: Repository
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['CreatedRepositoryContribution']] = Field(
+        'CreatedRepositoryContribution', alias='__typename'
+    )
+
+
+class CreatedRepositoryContributionConnection(BaseModel):
+    """
+    The connection type for CreatedRepositoryContribution.
+    """
+
+    edges: Optional[List[Optional[CreatedRepositoryContributionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[CreatedRepositoryContribution]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['CreatedRepositoryContributionConnection']] = Field(
+        'CreatedRepositoryContributionConnection', alias='__typename'
+    )
+
+
+class CreatedRepositoryContributionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[CreatedRepositoryContribution] = None
+    typename__: Optional[Literal['CreatedRepositoryContributionEdge']] = Field(
+        'CreatedRepositoryContributionEdge', alias='__typename'
+    )
+
+
+class CrossReferencedEvent(Node, UniformResourceLocatable):
+    """
+    Represents a mention made by one issue or pull request to another.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    isCrossRepository: Boolean
+    referencedAt: DateTime
+    resourcePath: URI
+    source: ReferencedSubject
+    target: ReferencedSubject
+    url: URI
+    willCloseTarget: Boolean
+    typename__: Optional[Literal['CrossReferencedEvent']] = Field(
+        'CrossReferencedEvent', alias='__typename'
+    )
+
+
+class DeclineTopicSuggestionPayload(BaseModel):
+    """
+    Autogenerated return type of DeclineTopicSuggestion
+    """
+
+    clientMutationId: Optional[String] = None
+    topic: Optional[Topic] = None
+    typename__: Optional[Literal['DeclineTopicSuggestionPayload']] = Field(
+        'DeclineTopicSuggestionPayload', alias='__typename'
+    )
+
+
+class DeleteBranchProtectionRulePayload(BaseModel):
+    """
+    Autogenerated return type of DeleteBranchProtectionRule
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteBranchProtectionRulePayload']] = Field(
+        'DeleteBranchProtectionRulePayload', alias='__typename'
+    )
+
+
+class DeleteDeploymentPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteDeployment
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteDeploymentPayload']] = Field(
+        'DeleteDeploymentPayload', alias='__typename'
+    )
+
+
+class DeleteDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[DiscussionComment] = None
+    typename__: Optional[Literal['DeleteDiscussionCommentPayload']] = Field(
+        'DeleteDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class DeleteDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['DeleteDiscussionPayload']] = Field(
+        'DeleteDiscussionPayload', alias='__typename'
+    )
+
+
+class DeleteEnvironmentPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteEnvironmentPayload']] = Field(
+        'DeleteEnvironmentPayload', alias='__typename'
+    )
+
+
+class DeleteIpAllowListEntryPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteIpAllowListEntry
+    """
+
+    clientMutationId: Optional[String] = None
+    ipAllowListEntry: Optional[IpAllowListEntry] = None
+    typename__: Optional[Literal['DeleteIpAllowListEntryPayload']] = Field(
+        'DeleteIpAllowListEntryPayload', alias='__typename'
+    )
+
+
+class DeleteIssueCommentPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteIssueComment
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteIssueCommentPayload']] = Field(
+        'DeleteIssueCommentPayload', alias='__typename'
+    )
+
+
+class DeleteIssuePayload(BaseModel):
+    """
+    Autogenerated return type of DeleteIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['DeleteIssuePayload']] = Field(
+        'DeleteIssuePayload', alias='__typename'
+    )
+
+
+class DeleteLabelPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteLabelPayload']] = Field(
+        'DeleteLabelPayload', alias='__typename'
+    )
+
+
+class DeleteLinkedBranchPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteLinkedBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['DeleteLinkedBranchPayload']] = Field(
+        'DeleteLinkedBranchPayload', alias='__typename'
+    )
+
+
+class DeletePackageVersionPayload(BaseModel):
+    """
+    Autogenerated return type of DeletePackageVersion
+    """
+
+    clientMutationId: Optional[String] = None
+    success: Optional[Boolean] = None
+    typename__: Optional[Literal['DeletePackageVersionPayload']] = Field(
+        'DeletePackageVersionPayload', alias='__typename'
+    )
+
+
+class DeleteProjectCardPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectCard
+    """
+
+    clientMutationId: Optional[String] = None
+    column: Optional[ProjectColumn] = None
+    deletedCardId: Optional[ID] = None
+    typename__: Optional[Literal['DeleteProjectCardPayload']] = Field(
+        'DeleteProjectCardPayload', alias='__typename'
+    )
+
+
+class DeleteProjectColumnPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    deletedColumnId: Optional[ID] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['DeleteProjectColumnPayload']] = Field(
+        'DeleteProjectColumnPayload', alias='__typename'
+    )
+
+
+class DeleteProjectPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProject
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[ProjectOwner] = None
+    typename__: Optional[Literal['DeleteProjectPayload']] = Field(
+        'DeleteProjectPayload', alias='__typename'
+    )
+
+
+class DeleteProjectV2FieldPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectV2Field
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2Field: Optional[ProjectV2FieldConfiguration] = None
+    typename__: Optional[Literal['DeleteProjectV2FieldPayload']] = Field(
+        'DeleteProjectV2FieldPayload', alias='__typename'
+    )
+
+
+class DeleteProjectV2ItemPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    deletedItemId: Optional[ID] = None
+    typename__: Optional[Literal['DeleteProjectV2ItemPayload']] = Field(
+        'DeleteProjectV2ItemPayload', alias='__typename'
+    )
+
+
+class DeleteProjectV2Payload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['DeleteProjectV2Payload']] = Field(
+        'DeleteProjectV2Payload', alias='__typename'
+    )
+
+
+class DeleteProjectV2WorkflowPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteProjectV2Workflow
+    """
+
+    clientMutationId: Optional[String] = None
+    deletedWorkflowId: Optional[ID] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['DeleteProjectV2WorkflowPayload']] = Field(
+        'DeleteProjectV2WorkflowPayload', alias='__typename'
+    )
+
+
+class DeletePullRequestReviewCommentPayload(BaseModel):
+    """
+    Autogenerated return type of DeletePullRequestReviewComment
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    pullRequestReviewComment: Optional[PullRequestReviewComment] = None
+    typename__: Optional[Literal['DeletePullRequestReviewCommentPayload']] = Field(
+        'DeletePullRequestReviewCommentPayload', alias='__typename'
+    )
+
+
+class DeletePullRequestReviewPayload(BaseModel):
+    """
+    Autogenerated return type of DeletePullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    typename__: Optional[Literal['DeletePullRequestReviewPayload']] = Field(
+        'DeletePullRequestReviewPayload', alias='__typename'
+    )
+
+
+class DeleteRefPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteRef
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteRefPayload']] = Field(
+        'DeleteRefPayload', alias='__typename'
+    )
+
+
+class DeleteRepositoryRulesetPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteRepositoryRuleset
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteRepositoryRulesetPayload']] = Field(
+        'DeleteRepositoryRulesetPayload', alias='__typename'
+    )
+
+
+class DeleteTeamDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteTeamDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteTeamDiscussionCommentPayload']] = Field(
+        'DeleteTeamDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class DeleteTeamDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteTeamDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteTeamDiscussionPayload']] = Field(
+        'DeleteTeamDiscussionPayload', alias='__typename'
+    )
+
+
+class DeleteVerifiableDomainPayload(BaseModel):
+    """
+    Autogenerated return type of DeleteVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[VerifiableDomainOwner] = None
+    typename__: Optional[Literal['DeleteVerifiableDomainPayload']] = Field(
+        'DeleteVerifiableDomainPayload', alias='__typename'
+    )
+
+
+class DemilestonedEvent(Node):
+    """
+    Represents a 'demilestoned' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    milestoneTitle: String
+    subject: MilestoneItem
+    typename__: Optional[Literal['DemilestonedEvent']] = Field(
+        'DemilestonedEvent', alias='__typename'
+    )
+
+
+class DependabotUpdate(RepositoryNode):
+    """
+    A Dependabot Update for a dependency in a repository
+    """
+
+    error: Optional[DependabotUpdateError] = None
+    pullRequest: Optional[PullRequest] = None
+    repository: Repository
+    typename__: Optional[Literal['DependabotUpdate']] = Field(
+        'DependabotUpdate', alias='__typename'
+    )
+
+
+class DependabotUpdateError(BaseModel):
+    """
+    An error produced from a Dependabot Update
+    """
+
+    body: String
+    errorType: String
+    title: String
+    typename__: Optional[Literal['DependabotUpdateError']] = Field(
+        'DependabotUpdateError', alias='__typename'
+    )
+
+
+class DependencyGraphDependency(BaseModel):
+    """
+    A dependency manifest entry
+    """
+
+    hasDependencies: Boolean
+    packageLabel: String
+    packageManager: Optional[String] = None
+    packageName: String
+    repository: Optional[Repository] = None
+    requirements: String
+    typename__: Optional[Literal['DependencyGraphDependency']] = Field(
+        'DependencyGraphDependency', alias='__typename'
+    )
+
+
+class DependencyGraphDependencyConnection(BaseModel):
+    """
+    The connection type for DependencyGraphDependency.
+    """
+
+    edges: Optional[List[Optional[DependencyGraphDependencyEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DependencyGraphDependency]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DependencyGraphDependencyConnection']] = Field(
+        'DependencyGraphDependencyConnection', alias='__typename'
+    )
+
+
+class DependencyGraphDependencyEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DependencyGraphDependency] = None
+    typename__: Optional[Literal['DependencyGraphDependencyEdge']] = Field(
+        'DependencyGraphDependencyEdge', alias='__typename'
+    )
+
+
+class DependencyGraphManifest(Node):
+    """
+    Dependency manifest for a repository
+    """
+
+    blobPath: String
+    dependencies: Optional[DependencyGraphDependencyConnection] = None
+    dependenciesCount: Optional[Int] = None
+    exceedsMaxSize: Boolean
+    filename: String
+    id: ID
+    parseable: Boolean
+    repository: Repository
+    typename__: Optional[Literal['DependencyGraphManifest']] = Field(
+        'DependencyGraphManifest', alias='__typename'
+    )
+
+
+class DependencyGraphManifestConnection(BaseModel):
+    """
+    The connection type for DependencyGraphManifest.
+    """
+
+    edges: Optional[List[Optional[DependencyGraphManifestEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DependencyGraphManifest]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DependencyGraphManifestConnection']] = Field(
+        'DependencyGraphManifestConnection', alias='__typename'
+    )
+
+
+class DependencyGraphManifestEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DependencyGraphManifest] = None
+    typename__: Optional[Literal['DependencyGraphManifestEdge']] = Field(
+        'DependencyGraphManifestEdge', alias='__typename'
+    )
+
+
+class DeployKey(Node):
+    """
+    A repository deploy key.
+    """
+
+    createdAt: DateTime
+    id: ID
+    key: String
+    readOnly: Boolean
+    title: String
+    verified: Boolean
+    typename__: Optional[Literal['DeployKey']] = Field('DeployKey', alias='__typename')
+
+
+class DeployKeyConnection(BaseModel):
+    """
+    The connection type for DeployKey.
+    """
+
+    edges: Optional[List[Optional[DeployKeyEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[DeployKey]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeployKeyConnection']] = Field(
+        'DeployKeyConnection', alias='__typename'
+    )
+
+
+class DeployKeyEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeployKey] = None
+    typename__: Optional[Literal['DeployKeyEdge']] = Field(
+        'DeployKeyEdge', alias='__typename'
+    )
+
+
+class DeployedEvent(Node):
+    """
+    Represents a 'deployed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    deployment: Deployment
+    id: ID
+    pullRequest: PullRequest
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['DeployedEvent']] = Field(
+        'DeployedEvent', alias='__typename'
+    )
+
+
+class Deployment(Node):
+    """
+    Represents triggered deployment instance.
+    """
+
+    commit: Optional[Commit] = None
+    commitOid: String
+    createdAt: DateTime
+    creator: Actor
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    environment: Optional[String] = None
+    id: ID
+    latestEnvironment: Optional[String] = None
+    latestStatus: Optional[DeploymentStatus] = None
+    originalEnvironment: Optional[String] = None
+    payload: Optional[String] = None
+    ref: Optional[Ref] = None
+    repository: Repository
+    state: Optional[DeploymentState] = None
+    statuses: Optional[DeploymentStatusConnection] = None
+    task: Optional[String] = None
+    updatedAt: DateTime
+    typename__: Optional[Literal['Deployment']] = Field(
+        'Deployment', alias='__typename'
+    )
+
+
+class DeploymentConnection(BaseModel):
+    """
+    The connection type for Deployment.
+    """
+
+    edges: Optional[List[Optional[DeploymentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Deployment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentConnection']] = Field(
+        'DeploymentConnection', alias='__typename'
+    )
+
+
+class DeploymentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Deployment] = None
+    typename__: Optional[Literal['DeploymentEdge']] = Field(
+        'DeploymentEdge', alias='__typename'
+    )
+
+
+class DeploymentEnvironmentChangedEvent(Node):
+    """
+    Represents a 'deployment_environment_changed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    deploymentStatus: DeploymentStatus
+    id: ID
+    pullRequest: PullRequest
+    typename__: Optional[Literal['DeploymentEnvironmentChangedEvent']] = Field(
+        'DeploymentEnvironmentChangedEvent', alias='__typename'
+    )
+
+
+class DeploymentProtectionRule(BaseModel):
+    """
+    A protection rule.
+    """
+
+    databaseId: Optional[Int] = None
+    preventSelfReview: Optional[Boolean] = None
+    reviewers: DeploymentReviewerConnection
+    timeout: Int
+    type: DeploymentProtectionRuleType
+    typename__: Optional[Literal['DeploymentProtectionRule']] = Field(
+        'DeploymentProtectionRule', alias='__typename'
+    )
+
+
+class DeploymentProtectionRuleConnection(BaseModel):
+    """
+    The connection type for DeploymentProtectionRule.
+    """
+
+    edges: Optional[List[Optional[DeploymentProtectionRuleEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DeploymentProtectionRule]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentProtectionRuleConnection']] = Field(
+        'DeploymentProtectionRuleConnection', alias='__typename'
+    )
+
+
+class DeploymentProtectionRuleEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeploymentProtectionRule] = None
+    typename__: Optional[Literal['DeploymentProtectionRuleEdge']] = Field(
+        'DeploymentProtectionRuleEdge', alias='__typename'
+    )
+
+
+class DeploymentRequest(BaseModel):
+    """
+    A request to deploy a workflow run to an environment.
+    """
+
+    currentUserCanApprove: Boolean
+    environment: Environment
+    reviewers: DeploymentReviewerConnection
+    waitTimer: Int
+    waitTimerStartedAt: Optional[DateTime] = None
+    typename__: Optional[Literal['DeploymentRequest']] = Field(
+        'DeploymentRequest', alias='__typename'
+    )
+
+
+class DeploymentRequestConnection(BaseModel):
+    """
+    The connection type for DeploymentRequest.
+    """
+
+    edges: Optional[List[Optional[DeploymentRequestEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[DeploymentRequest]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentRequestConnection']] = Field(
+        'DeploymentRequestConnection', alias='__typename'
+    )
+
+
+class DeploymentRequestEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeploymentRequest] = None
+    typename__: Optional[Literal['DeploymentRequestEdge']] = Field(
+        'DeploymentRequestEdge', alias='__typename'
+    )
+
+
+class DeploymentReview(Node):
+    """
+    A deployment review.
+    """
+
+    comment: String
+    databaseId: Optional[Int] = None
+    environments: EnvironmentConnection
+    id: ID
+    state: DeploymentReviewState
+    user: User
+    typename__: Optional[Literal['DeploymentReview']] = Field(
+        'DeploymentReview', alias='__typename'
+    )
+
+
+class DeploymentReviewConnection(BaseModel):
+    """
+    The connection type for DeploymentReview.
+    """
+
+    edges: Optional[List[Optional[DeploymentReviewEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[DeploymentReview]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentReviewConnection']] = Field(
+        'DeploymentReviewConnection', alias='__typename'
+    )
+
+
+class DeploymentReviewEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeploymentReview] = None
+    typename__: Optional[Literal['DeploymentReviewEdge']] = Field(
+        'DeploymentReviewEdge', alias='__typename'
+    )
+
+
+class DeploymentReviewerConnection(BaseModel):
+    """
+    The connection type for DeploymentReviewer.
+    """
+
+    edges: Optional[List[Optional[DeploymentReviewerEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DeploymentReviewer]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentReviewerConnection']] = Field(
+        'DeploymentReviewerConnection', alias='__typename'
+    )
+
+
+class DeploymentReviewerEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeploymentReviewer] = None
+    typename__: Optional[Literal['DeploymentReviewerEdge']] = Field(
+        'DeploymentReviewerEdge', alias='__typename'
+    )
+
+
+class DeploymentStatus(Node):
+    """
+    Describes the status of a given deployment attempt.
+    """
+
+    createdAt: DateTime
+    creator: Actor
+    deployment: Deployment
+    description: Optional[String] = None
+    environment: Optional[String] = None
+    environmentUrl: Optional[URI] = None
+    id: ID
+    logUrl: Optional[URI] = None
+    state: DeploymentStatusState
+    updatedAt: DateTime
+    typename__: Optional[Literal['DeploymentStatus']] = Field(
+        'DeploymentStatus', alias='__typename'
+    )
+
+
+class DeploymentStatusConnection(BaseModel):
+    """
+    The connection type for DeploymentStatus.
+    """
+
+    edges: Optional[List[Optional[DeploymentStatusEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[DeploymentStatus]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DeploymentStatusConnection']] = Field(
+        'DeploymentStatusConnection', alias='__typename'
+    )
+
+
+class DeploymentStatusEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DeploymentStatus] = None
+    typename__: Optional[Literal['DeploymentStatusEdge']] = Field(
+        'DeploymentStatusEdge', alias='__typename'
+    )
+
+
+class DequeuePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of DequeuePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    mergeQueueEntry: Optional[MergeQueueEntry] = None
+    typename__: Optional[Literal['DequeuePullRequestPayload']] = Field(
+        'DequeuePullRequestPayload', alias='__typename'
+    )
+
+
+class DisablePullRequestAutoMergePayload(BaseModel):
+    """
+    Autogenerated return type of DisablePullRequestAutoMerge
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['DisablePullRequestAutoMergePayload']] = Field(
+        'DisablePullRequestAutoMergePayload', alias='__typename'
+    )
+
+
+class DisconnectedEvent(Node):
+    """
+    Represents a 'disconnected' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    isCrossRepository: Boolean
+    source: ReferencedSubject
+    subject: ReferencedSubject
+    typename__: Optional[Literal['DisconnectedEvent']] = Field(
+        'DisconnectedEvent', alias='__typename'
+    )
+
+
+class Discussion(
+    Closable,
+    Comment,
+    Deletable,
+    Labelable,
+    Lockable,
+    Node,
+    Reactable,
+    RepositoryNode,
+    Subscribable,
+    Updatable,
+    Votable,
+):
+    """
+    A discussion in a repository.
+    """
+
+    activeLockReason: Optional[LockReason] = None
+    answer: Optional[DiscussionComment] = None
+    answerChosenAt: Optional[DateTime] = None
+    answerChosenBy: Optional[Actor] = None
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    category: DiscussionCategory
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    comments: DiscussionCommentConnection
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isAnswered: Optional[Boolean] = None
+    labels: Optional[LabelConnection] = None
+    lastEditedAt: Optional[DateTime] = None
+    locked: Boolean
+    number: Int
+    poll: Optional[DiscussionPoll] = None
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    stateReason: Optional[DiscussionStateReason] = None
+    title: String
+    updatedAt: DateTime
+    upvoteCount: Int
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanClose: Boolean
+    viewerCanDelete: Boolean
+    viewerCanReact: Boolean
+    viewerCanReopen: Boolean
+    viewerCanSubscribe: Boolean
+    viewerCanUpdate: Boolean
+    viewerCanUpvote: Boolean
+    viewerDidAuthor: Boolean
+    viewerHasUpvoted: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    typename__: Optional[Literal['Discussion']] = Field(
+        'Discussion', alias='__typename'
+    )
+
+
+class DiscussionCategory(Node, RepositoryNode):
+    """
+    A category for discussions in a repository.
+    """
+
+    createdAt: DateTime
+    description: Optional[String] = None
+    emoji: String
+    emojiHTML: HTML
+    id: ID
+    isAnswerable: Boolean
+    name: String
+    repository: Repository
+    slug: String
+    updatedAt: DateTime
+    typename__: Optional[Literal['DiscussionCategory']] = Field(
+        'DiscussionCategory', alias='__typename'
+    )
+
+
+class DiscussionCategoryConnection(BaseModel):
+    """
+    The connection type for DiscussionCategory.
+    """
+
+    edges: Optional[List[Optional[DiscussionCategoryEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DiscussionCategory]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DiscussionCategoryConnection']] = Field(
+        'DiscussionCategoryConnection', alias='__typename'
+    )
+
+
+class DiscussionCategoryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DiscussionCategory] = None
+    typename__: Optional[Literal['DiscussionCategoryEdge']] = Field(
+        'DiscussionCategoryEdge', alias='__typename'
+    )
+
+
+class DiscussionComment(
+    Comment,
+    Deletable,
+    Minimizable,
+    Node,
+    Reactable,
+    Updatable,
+    UpdatableComment,
+    Votable,
+):
+    """
+    A comment on a discussion.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    deletedAt: Optional[DateTime] = None
+    discussion: Optional[Discussion] = None
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isAnswer: Boolean
+    isMinimized: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    minimizedReason: Optional[String] = None
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    replies: DiscussionCommentConnection
+    replyTo: Optional[DiscussionComment] = None
+    resourcePath: URI
+    updatedAt: DateTime
+    upvoteCount: Int
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMarkAsAnswer: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanReact: Boolean
+    viewerCanUnmarkAsAnswer: Boolean
+    viewerCanUpdate: Boolean
+    viewerCanUpvote: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    viewerHasUpvoted: Boolean
+    typename__: Optional[Literal['DiscussionComment']] = Field(
+        'DiscussionComment', alias='__typename'
+    )
+
+
+class DiscussionCommentConnection(BaseModel):
+    """
+    The connection type for DiscussionComment.
+    """
+
+    edges: Optional[List[Optional[DiscussionCommentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[DiscussionComment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DiscussionCommentConnection']] = Field(
+        'DiscussionCommentConnection', alias='__typename'
+    )
+
+
+class DiscussionCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DiscussionComment] = None
+    typename__: Optional[Literal['DiscussionCommentEdge']] = Field(
+        'DiscussionCommentEdge', alias='__typename'
+    )
+
+
+class DiscussionConnection(BaseModel):
+    """
+    The connection type for Discussion.
+    """
+
+    edges: Optional[List[Optional[DiscussionEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Discussion]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DiscussionConnection']] = Field(
+        'DiscussionConnection', alias='__typename'
+    )
+
+
+class DiscussionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Discussion] = None
+    typename__: Optional[Literal['DiscussionEdge']] = Field(
+        'DiscussionEdge', alias='__typename'
+    )
+
+
+class DiscussionPoll(Node):
+    """
+    A poll for a discussion.
+    """
+
+    discussion: Optional[Discussion] = None
+    id: ID
+    options: Optional[DiscussionPollOptionConnection] = None
+    question: String
+    totalVoteCount: Int
+    viewerCanVote: Boolean
+    viewerHasVoted: Boolean
+    typename__: Optional[Literal['DiscussionPoll']] = Field(
+        'DiscussionPoll', alias='__typename'
+    )
+
+
+class DiscussionPollOption(Node):
+    """
+    An option for a discussion poll.
+    """
+
+    id: ID
+    option: String
+    poll: Optional[DiscussionPoll] = None
+    totalVoteCount: Int
+    viewerHasVoted: Boolean
+    typename__: Optional[Literal['DiscussionPollOption']] = Field(
+        'DiscussionPollOption', alias='__typename'
+    )
+
+
+class DiscussionPollOptionConnection(BaseModel):
+    """
+    The connection type for DiscussionPollOption.
+    """
+
+    edges: Optional[List[Optional[DiscussionPollOptionEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[DiscussionPollOption]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['DiscussionPollOptionConnection']] = Field(
+        'DiscussionPollOptionConnection', alias='__typename'
+    )
+
+
+class DiscussionPollOptionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[DiscussionPollOption] = None
+    typename__: Optional[Literal['DiscussionPollOptionEdge']] = Field(
+        'DiscussionPollOptionEdge', alias='__typename'
+    )
+
+
+class DismissPullRequestReviewPayload(BaseModel):
+    """
+    Autogenerated return type of DismissPullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    typename__: Optional[Literal['DismissPullRequestReviewPayload']] = Field(
+        'DismissPullRequestReviewPayload', alias='__typename'
+    )
+
+
+class DismissRepositoryVulnerabilityAlertPayload(BaseModel):
+    """
+    Autogenerated return type of DismissRepositoryVulnerabilityAlert
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryVulnerabilityAlert: Optional[RepositoryVulnerabilityAlert] = None
+    typename__: Optional[Literal['DismissRepositoryVulnerabilityAlertPayload']] = Field(
+        'DismissRepositoryVulnerabilityAlertPayload', alias='__typename'
+    )
+
+
+class DraftIssue(Node):
+    """
+    A draft issue within a project.
+    """
+
+    assignees: UserConnection
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    id: ID
+    projectV2Items: ProjectV2ItemConnection
+    projectsV2: ProjectV2Connection
+    title: String
+    updatedAt: DateTime
+    typename__: Optional[Literal['DraftIssue']] = Field(
+        'DraftIssue', alias='__typename'
+    )
+
+
+class EnablePullRequestAutoMergePayload(BaseModel):
+    """
+    Autogenerated return type of EnablePullRequestAutoMerge
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['EnablePullRequestAutoMergePayload']] = Field(
+        'EnablePullRequestAutoMergePayload', alias='__typename'
+    )
+
+
+class EnqueuePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of EnqueuePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    mergeQueueEntry: Optional[MergeQueueEntry] = None
+    typename__: Optional[Literal['EnqueuePullRequestPayload']] = Field(
+        'EnqueuePullRequestPayload', alias='__typename'
+    )
+
+
+class Enterprise(AnnouncementBanner, Node):
+    """
+    An account to manage multiple organizations with consolidated policy and billing.
+    """
+
+    announcement: Optional[String] = None
+    announcementExpiresAt: Optional[DateTime] = None
+    announcementUserDismissible: Optional[Boolean] = None
+    avatarUrl: URI
+    billingInfo: Optional[EnterpriseBillingInfo] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    descriptionHTML: HTML
+    id: ID
+    location: Optional[String] = None
+    members: EnterpriseMemberConnection
+    name: String
+    organizations: OrganizationConnection
+    ownerInfo: Optional[EnterpriseOwnerInfo] = None
+    resourcePath: URI
+    slug: String
+    url: URI
+    viewerIsAdmin: Boolean
+    websiteUrl: Optional[URI] = None
+    typename__: Optional[Literal['Enterprise']] = Field(
+        'Enterprise', alias='__typename'
+    )
+
+
+class EnterpriseAdministratorConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[EnterpriseAdministratorEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseAdministratorConnection']] = Field(
+        'EnterpriseAdministratorConnection', alias='__typename'
+    )
+
+
+class EnterpriseAdministratorEdge(BaseModel):
+    """
+    A User who is an administrator of an enterprise.
+    """
+
+    cursor: String
+    node: Optional[User] = None
+    role: EnterpriseAdministratorRole
+    typename__: Optional[Literal['EnterpriseAdministratorEdge']] = Field(
+        'EnterpriseAdministratorEdge', alias='__typename'
+    )
+
+
+class EnterpriseAdministratorInvitation(Node):
+    """
+    An invitation for a user to become an owner or billing manager of an enterprise.
+    """
+
+    createdAt: DateTime
+    email: Optional[String] = None
+    enterprise: Enterprise
+    id: ID
+    invitee: Optional[User] = None
+    inviter: Optional[User] = None
+    role: EnterpriseAdministratorRole
+    typename__: Optional[Literal['EnterpriseAdministratorInvitation']] = Field(
+        'EnterpriseAdministratorInvitation', alias='__typename'
+    )
+
+
+class EnterpriseAdministratorInvitationConnection(BaseModel):
+    """
+    The connection type for EnterpriseAdministratorInvitation.
+    """
+
+    edges: Optional[List[Optional[EnterpriseAdministratorInvitationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseAdministratorInvitation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[
+        Literal['EnterpriseAdministratorInvitationConnection']
+    ] = Field('EnterpriseAdministratorInvitationConnection', alias='__typename')
+
+
+class EnterpriseAdministratorInvitationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseAdministratorInvitation] = None
+    typename__: Optional[Literal['EnterpriseAdministratorInvitationEdge']] = Field(
+        'EnterpriseAdministratorInvitationEdge', alias='__typename'
+    )
+
+
+class EnterpriseBillingInfo(BaseModel):
+    """
+    Enterprise billing information visible to enterprise billing managers and owners.
+    """
+
+    allLicensableUsersCount: Int
+    assetPacks: Int
+    bandwidthQuota: Float
+    bandwidthUsage: Float
+    bandwidthUsagePercentage: Int
+    storageQuota: Float
+    storageUsage: Float
+    storageUsagePercentage: Int
+    totalAvailableLicenses: Int
+    totalLicenses: Int
+    typename__: Optional[Literal['EnterpriseBillingInfo']] = Field(
+        'EnterpriseBillingInfo', alias='__typename'
+    )
+
+
+class EnterpriseConnection(BaseModel):
+    """
+    The connection type for Enterprise.
+    """
+
+    edges: Optional[List[Optional[EnterpriseEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Enterprise]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseConnection']] = Field(
+        'EnterpriseConnection', alias='__typename'
+    )
+
+
+class EnterpriseEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Enterprise] = None
+    typename__: Optional[Literal['EnterpriseEdge']] = Field(
+        'EnterpriseEdge', alias='__typename'
+    )
+
+
+class EnterpriseFailedInvitationConnection(BaseModel):
+    """
+    The connection type for OrganizationInvitation.
+    """
+
+    edges: Optional[List[Optional[EnterpriseFailedInvitationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[OrganizationInvitation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    totalUniqueUserCount: Int
+    typename__: Optional[Literal['EnterpriseFailedInvitationConnection']] = Field(
+        'EnterpriseFailedInvitationConnection', alias='__typename'
+    )
+
+
+class EnterpriseFailedInvitationEdge(BaseModel):
+    """
+    A failed invitation to be a member in an enterprise organization.
+    """
+
+    cursor: String
+    node: Optional[OrganizationInvitation] = None
+    typename__: Optional[Literal['EnterpriseFailedInvitationEdge']] = Field(
+        'EnterpriseFailedInvitationEdge', alias='__typename'
+    )
+
+
+class EnterpriseIdentityProvider(Node):
+    """
+    An identity provider configured to provision identities for an enterprise.
+    Visible to enterprise owners or enterprise owners' personal access tokens
+    (classic) with read:enterprise or admin:enterprise scope.
+    """
+
+    digestMethod: Optional[SamlDigestAlgorithm] = None
+    enterprise: Optional[Enterprise] = None
+    externalIdentities: ExternalIdentityConnection
+    id: ID
+    idpCertificate: Optional[X509Certificate] = None
+    issuer: Optional[String] = None
+    recoveryCodes: Optional[List[String]] = Field(default_factory=list)
+    signatureMethod: Optional[SamlSignatureAlgorithm] = None
+    ssoUrl: Optional[URI] = None
+    typename__: Optional[Literal['EnterpriseIdentityProvider']] = Field(
+        'EnterpriseIdentityProvider', alias='__typename'
+    )
+
+
+class EnterpriseMemberConnection(BaseModel):
+    """
+    The connection type for EnterpriseMember.
+    """
+
+    edges: Optional[List[Optional[EnterpriseMemberEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[EnterpriseMember]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseMemberConnection']] = Field(
+        'EnterpriseMemberConnection', alias='__typename'
+    )
+
+
+class EnterpriseMemberEdge(BaseModel):
+    """
+    A User who is a member of an enterprise through one or more organizations.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseMember] = None
+    typename__: Optional[Literal['EnterpriseMemberEdge']] = Field(
+        'EnterpriseMemberEdge', alias='__typename'
+    )
+
+
+class EnterpriseOrganizationMembershipConnection(BaseModel):
+    """
+    The connection type for Organization.
+    """
+
+    edges: Optional[List[Optional[EnterpriseOrganizationMembershipEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[Organization]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseOrganizationMembershipConnection']] = Field(
+        'EnterpriseOrganizationMembershipConnection', alias='__typename'
+    )
+
+
+class EnterpriseOrganizationMembershipEdge(BaseModel):
+    """
+    An enterprise organization that a user is a member of.
+    """
+
+    cursor: String
+    node: Optional[Organization] = None
+    role: EnterpriseUserAccountMembershipRole
+    typename__: Optional[Literal['EnterpriseOrganizationMembershipEdge']] = Field(
+        'EnterpriseOrganizationMembershipEdge', alias='__typename'
+    )
+
+
+class EnterpriseOutsideCollaboratorConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[EnterpriseOutsideCollaboratorEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseOutsideCollaboratorConnection']] = Field(
+        'EnterpriseOutsideCollaboratorConnection', alias='__typename'
+    )
+
+
+class EnterpriseOutsideCollaboratorEdge(BaseModel):
+    """
+    A User who is an outside collaborator of an enterprise through one or more organizations.
+    """
+
+    cursor: String
+    node: Optional[User] = None
+    repositories: EnterpriseRepositoryInfoConnection
+    typename__: Optional[Literal['EnterpriseOutsideCollaboratorEdge']] = Field(
+        'EnterpriseOutsideCollaboratorEdge', alias='__typename'
+    )
+
+
+class EnterpriseOwnerInfo(BaseModel):
+    """
+    Enterprise information visible to enterprise owners or enterprise owners'
+    personal access tokens (classic) with read:enterprise or admin:enterprise scope.
+    """
+
+    admins: EnterpriseAdministratorConnection
+    affiliatedUsersWithTwoFactorDisabled: UserConnection
+    affiliatedUsersWithTwoFactorDisabledExist: Boolean
+    allowPrivateRepositoryForkingSetting: EnterpriseEnabledDisabledSettingValue
+    allowPrivateRepositoryForkingSettingOrganizations: OrganizationConnection
+    allowPrivateRepositoryForkingSettingPolicyValue: Optional[
+        EnterpriseAllowPrivateRepositoryForkingPolicyValue
+    ] = None
+    defaultRepositoryPermissionSetting: EnterpriseDefaultRepositoryPermissionSettingValue
+    defaultRepositoryPermissionSettingOrganizations: OrganizationConnection
+    domains: VerifiableDomainConnection
+    enterpriseServerInstallations: EnterpriseServerInstallationConnection
+    failedInvitations: EnterpriseFailedInvitationConnection
+    ipAllowListEnabledSetting: IpAllowListEnabledSettingValue
+    ipAllowListEntries: IpAllowListEntryConnection
+    ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue
+    isUpdatingDefaultRepositoryPermission: Boolean
+    isUpdatingTwoFactorRequirement: Boolean
+    membersCanChangeRepositoryVisibilitySetting: EnterpriseEnabledDisabledSettingValue
+    membersCanChangeRepositoryVisibilitySettingOrganizations: OrganizationConnection
+    membersCanCreateInternalRepositoriesSetting: Optional[Boolean] = None
+    membersCanCreatePrivateRepositoriesSetting: Optional[Boolean] = None
+    membersCanCreatePublicRepositoriesSetting: Optional[Boolean] = None
+    membersCanCreateRepositoriesSetting: Optional[
+        EnterpriseMembersCanCreateRepositoriesSettingValue
+    ] = None
+    membersCanCreateRepositoriesSettingOrganizations: OrganizationConnection
+    membersCanDeleteIssuesSetting: EnterpriseEnabledDisabledSettingValue
+    membersCanDeleteIssuesSettingOrganizations: OrganizationConnection
+    membersCanDeleteRepositoriesSetting: EnterpriseEnabledDisabledSettingValue
+    membersCanDeleteRepositoriesSettingOrganizations: OrganizationConnection
+    membersCanInviteCollaboratorsSetting: EnterpriseEnabledDisabledSettingValue
+    membersCanInviteCollaboratorsSettingOrganizations: OrganizationConnection
+    membersCanMakePurchasesSetting: EnterpriseMembersCanMakePurchasesSettingValue
+    membersCanUpdateProtectedBranchesSetting: EnterpriseEnabledDisabledSettingValue
+    membersCanUpdateProtectedBranchesSettingOrganizations: OrganizationConnection
+    membersCanViewDependencyInsightsSetting: EnterpriseEnabledDisabledSettingValue
+    membersCanViewDependencyInsightsSettingOrganizations: OrganizationConnection
+    notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue
+    oidcProvider: Optional[OIDCProvider] = None
+    organizationProjectsSetting: EnterpriseEnabledDisabledSettingValue
+    organizationProjectsSettingOrganizations: OrganizationConnection
+    outsideCollaborators: EnterpriseOutsideCollaboratorConnection
+    pendingAdminInvitations: EnterpriseAdministratorInvitationConnection
+    pendingCollaboratorInvitations: RepositoryInvitationConnection
+    pendingMemberInvitations: EnterprisePendingMemberInvitationConnection
+    repositoryProjectsSetting: EnterpriseEnabledDisabledSettingValue
+    repositoryProjectsSettingOrganizations: OrganizationConnection
+    samlIdentityProvider: Optional[EnterpriseIdentityProvider] = None
+    samlIdentityProviderSettingOrganizations: OrganizationConnection
+    supportEntitlements: EnterpriseMemberConnection
+    teamDiscussionsSetting: EnterpriseEnabledDisabledSettingValue
+    teamDiscussionsSettingOrganizations: OrganizationConnection
+    twoFactorRequiredSetting: EnterpriseEnabledSettingValue
+    twoFactorRequiredSettingOrganizations: OrganizationConnection
+    typename__: Optional[Literal['EnterpriseOwnerInfo']] = Field(
+        'EnterpriseOwnerInfo', alias='__typename'
+    )
+
+
+class EnterprisePendingMemberInvitationConnection(BaseModel):
+    """
+    The connection type for OrganizationInvitation.
+    """
+
+    edges: Optional[List[Optional[EnterprisePendingMemberInvitationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[OrganizationInvitation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    totalUniqueUserCount: Int
+    typename__: Optional[
+        Literal['EnterprisePendingMemberInvitationConnection']
+    ] = Field('EnterprisePendingMemberInvitationConnection', alias='__typename')
+
+
+class EnterprisePendingMemberInvitationEdge(BaseModel):
+    """
+    An invitation to be a member in an enterprise organization.
+    """
+
+    cursor: String
+    node: Optional[OrganizationInvitation] = None
+    typename__: Optional[Literal['EnterprisePendingMemberInvitationEdge']] = Field(
+        'EnterprisePendingMemberInvitationEdge', alias='__typename'
+    )
+
+
+class EnterpriseRepositoryInfo(Node):
+    """
+    A subset of repository information queryable from an enterprise.
+    """
+
+    id: ID
+    isPrivate: Boolean
+    name: String
+    nameWithOwner: String
+    typename__: Optional[Literal['EnterpriseRepositoryInfo']] = Field(
+        'EnterpriseRepositoryInfo', alias='__typename'
+    )
+
+
+class EnterpriseRepositoryInfoConnection(BaseModel):
+    """
+    The connection type for EnterpriseRepositoryInfo.
+    """
+
+    edges: Optional[List[Optional[EnterpriseRepositoryInfoEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseRepositoryInfo]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseRepositoryInfoConnection']] = Field(
+        'EnterpriseRepositoryInfoConnection', alias='__typename'
+    )
+
+
+class EnterpriseRepositoryInfoEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseRepositoryInfo] = None
+    typename__: Optional[Literal['EnterpriseRepositoryInfoEdge']] = Field(
+        'EnterpriseRepositoryInfoEdge', alias='__typename'
+    )
+
+
+class EnterpriseServerInstallation(Node):
+    """
+    An Enterprise Server installation.
+    """
+
+    createdAt: DateTime
+    customerName: String
+    hostName: String
+    id: ID
+    isConnected: Boolean
+    updatedAt: DateTime
+    userAccounts: EnterpriseServerUserAccountConnection
+    userAccountsUploads: EnterpriseServerUserAccountsUploadConnection
+    typename__: Optional[Literal['EnterpriseServerInstallation']] = Field(
+        'EnterpriseServerInstallation', alias='__typename'
+    )
+
+
+class EnterpriseServerInstallationConnection(BaseModel):
+    """
+    The connection type for EnterpriseServerInstallation.
+    """
+
+    edges: Optional[List[Optional[EnterpriseServerInstallationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseServerInstallation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseServerInstallationConnection']] = Field(
+        'EnterpriseServerInstallationConnection', alias='__typename'
+    )
+
+
+class EnterpriseServerInstallationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseServerInstallation] = None
+    typename__: Optional[Literal['EnterpriseServerInstallationEdge']] = Field(
+        'EnterpriseServerInstallationEdge', alias='__typename'
+    )
+
+
+class EnterpriseServerInstallationMembershipConnection(BaseModel):
+    """
+    The connection type for EnterpriseServerInstallation.
+    """
+
+    edges: Optional[List[Optional[EnterpriseServerInstallationMembershipEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseServerInstallation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[
+        Literal['EnterpriseServerInstallationMembershipConnection']
+    ] = Field('EnterpriseServerInstallationMembershipConnection', alias='__typename')
+
+
+class EnterpriseServerInstallationMembershipEdge(BaseModel):
+    """
+    An Enterprise Server installation that a user is a member of.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseServerInstallation] = None
+    role: EnterpriseUserAccountMembershipRole
+    typename__: Optional[Literal['EnterpriseServerInstallationMembershipEdge']] = Field(
+        'EnterpriseServerInstallationMembershipEdge', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccount(Node):
+    """
+    A user account on an Enterprise Server installation.
+    """
+
+    createdAt: DateTime
+    emails: EnterpriseServerUserAccountEmailConnection
+    enterpriseServerInstallation: EnterpriseServerInstallation
+    id: ID
+    isSiteAdmin: Boolean
+    login: String
+    profileName: Optional[String] = None
+    remoteCreatedAt: DateTime
+    remoteUserId: Int
+    updatedAt: DateTime
+    typename__: Optional[Literal['EnterpriseServerUserAccount']] = Field(
+        'EnterpriseServerUserAccount', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountConnection(BaseModel):
+    """
+    The connection type for EnterpriseServerUserAccount.
+    """
+
+    edges: Optional[List[Optional[EnterpriseServerUserAccountEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseServerUserAccount]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseServerUserAccountConnection']] = Field(
+        'EnterpriseServerUserAccountConnection', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseServerUserAccount] = None
+    typename__: Optional[Literal['EnterpriseServerUserAccountEdge']] = Field(
+        'EnterpriseServerUserAccountEdge', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountEmail(Node):
+    """
+    An email belonging to a user account on an Enterprise Server installation.
+    """
+
+    createdAt: DateTime
+    email: String
+    id: ID
+    isPrimary: Boolean
+    updatedAt: DateTime
+    userAccount: EnterpriseServerUserAccount
+    typename__: Optional[Literal['EnterpriseServerUserAccountEmail']] = Field(
+        'EnterpriseServerUserAccountEmail', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountEmailConnection(BaseModel):
+    """
+    The connection type for EnterpriseServerUserAccountEmail.
+    """
+
+    edges: Optional[List[Optional[EnterpriseServerUserAccountEmailEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseServerUserAccountEmail]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnterpriseServerUserAccountEmailConnection']] = Field(
+        'EnterpriseServerUserAccountEmailConnection', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountEmailEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseServerUserAccountEmail] = None
+    typename__: Optional[Literal['EnterpriseServerUserAccountEmailEdge']] = Field(
+        'EnterpriseServerUserAccountEmailEdge', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountsUpload(Node):
+    """
+    A user accounts upload from an Enterprise Server installation.
+    """
+
+    createdAt: DateTime
+    enterprise: Enterprise
+    enterpriseServerInstallation: EnterpriseServerInstallation
+    id: ID
+    name: String
+    syncState: EnterpriseServerUserAccountsUploadSyncState
+    updatedAt: DateTime
+    typename__: Optional[Literal['EnterpriseServerUserAccountsUpload']] = Field(
+        'EnterpriseServerUserAccountsUpload', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountsUploadConnection(BaseModel):
+    """
+    The connection type for EnterpriseServerUserAccountsUpload.
+    """
+
+    edges: Optional[List[Optional[EnterpriseServerUserAccountsUploadEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[EnterpriseServerUserAccountsUpload]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[
+        Literal['EnterpriseServerUserAccountsUploadConnection']
+    ] = Field('EnterpriseServerUserAccountsUploadConnection', alias='__typename')
+
+
+class EnterpriseServerUserAccountsUploadEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[EnterpriseServerUserAccountsUpload] = None
+    typename__: Optional[Literal['EnterpriseServerUserAccountsUploadEdge']] = Field(
+        'EnterpriseServerUserAccountsUploadEdge', alias='__typename'
+    )
+
+
+class EnterpriseUserAccount(Actor, Node):
+    """
+    An account for a user who is an admin of an enterprise or a member of an enterprise through one or more organizations.
+    """
+
+    avatarUrl: URI
+    createdAt: DateTime
+    enterprise: Enterprise
+    enterpriseInstallations: EnterpriseServerInstallationMembershipConnection
+    id: ID
+    login: String
+    name: Optional[String] = None
+    organizations: EnterpriseOrganizationMembershipConnection
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    user: Optional[User] = None
+    typename__: Optional[Literal['EnterpriseUserAccount']] = Field(
+        'EnterpriseUserAccount', alias='__typename'
+    )
+
+
+class Environment(Node):
+    """
+    An environment.
+    """
+
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    protectionRules: DeploymentProtectionRuleConnection
+    typename__: Optional[Literal['Environment']] = Field(
+        'Environment', alias='__typename'
+    )
+
+
+class EnvironmentConnection(BaseModel):
+    """
+    The connection type for Environment.
+    """
+
+    edges: Optional[List[Optional[EnvironmentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Environment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['EnvironmentConnection']] = Field(
+        'EnvironmentConnection', alias='__typename'
+    )
+
+
+class EnvironmentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Environment] = None
+    typename__: Optional[Literal['EnvironmentEdge']] = Field(
+        'EnvironmentEdge', alias='__typename'
+    )
+
+
+class ExternalIdentity(Node):
+    """
+    An external identity provisioned by SAML SSO or SCIM. If SAML is configured on
+    the organization, the external identity is visible to (1) organization owners,
+    (2) organization owners' personal access tokens (classic) with read:org or
+    admin:org scope, (3) GitHub App with an installation token with read or write
+    access to members. If SAML is configured on the enterprise, the external
+    identity is visible to (1) enterprise owners, (2) enterprise owners' personal
+    access tokens (classic) with read:enterprise or admin:enterprise scope.
+    """
+
+    guid: String
+    id: ID
+    organizationInvitation: Optional[OrganizationInvitation] = None
+    samlIdentity: Optional[ExternalIdentitySamlAttributes] = None
+    scimIdentity: Optional[ExternalIdentityScimAttributes] = None
+    user: Optional[User] = None
+    typename__: Optional[Literal['ExternalIdentity']] = Field(
+        'ExternalIdentity', alias='__typename'
+    )
+
+
+class ExternalIdentityAttribute(BaseModel):
+    """
+    An attribute for the External Identity attributes collection
+    """
+
+    metadata: Optional[String] = None
+    name: String
+    value: String
+    typename__: Optional[Literal['ExternalIdentityAttribute']] = Field(
+        'ExternalIdentityAttribute', alias='__typename'
+    )
+
+
+class ExternalIdentityConnection(BaseModel):
+    """
+    The connection type for ExternalIdentity.
+    """
+
+    edges: Optional[List[Optional[ExternalIdentityEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ExternalIdentity]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ExternalIdentityConnection']] = Field(
+        'ExternalIdentityConnection', alias='__typename'
+    )
+
+
+class ExternalIdentityEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ExternalIdentity] = None
+    typename__: Optional[Literal['ExternalIdentityEdge']] = Field(
+        'ExternalIdentityEdge', alias='__typename'
+    )
+
+
+class ExternalIdentitySamlAttributes(BaseModel):
+    """
+    SAML attributes for the External Identity
+    """
+
+    attributes: List[ExternalIdentityAttribute]
+    emails: Optional[List[UserEmailMetadata]] = Field(default_factory=list)
+    familyName: Optional[String] = None
+    givenName: Optional[String] = None
+    groups: Optional[List[String]] = Field(default_factory=list)
+    nameId: Optional[String] = None
+    username: Optional[String] = None
+    typename__: Optional[Literal['ExternalIdentitySamlAttributes']] = Field(
+        'ExternalIdentitySamlAttributes', alias='__typename'
+    )
+
+
+class ExternalIdentityScimAttributes(BaseModel):
+    """
+    SCIM attributes for the External Identity
+    """
+
+    emails: Optional[List[UserEmailMetadata]] = Field(default_factory=list)
+    familyName: Optional[String] = None
+    givenName: Optional[String] = None
+    groups: Optional[List[String]] = Field(default_factory=list)
+    username: Optional[String] = None
+    typename__: Optional[Literal['ExternalIdentityScimAttributes']] = Field(
+        'ExternalIdentityScimAttributes', alias='__typename'
+    )
+
+
+class FollowOrganizationPayload(BaseModel):
+    """
+    Autogenerated return type of FollowOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[Literal['FollowOrganizationPayload']] = Field(
+        'FollowOrganizationPayload', alias='__typename'
+    )
+
+
+class FollowUserPayload(BaseModel):
+    """
+    Autogenerated return type of FollowUser
+    """
+
+    clientMutationId: Optional[String] = None
+    user: Optional[User] = None
+    typename__: Optional[Literal['FollowUserPayload']] = Field(
+        'FollowUserPayload', alias='__typename'
+    )
+
+
+class FollowerConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[UserEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['FollowerConnection']] = Field(
+        'FollowerConnection', alias='__typename'
+    )
+
+
+class FollowingConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[UserEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['FollowingConnection']] = Field(
+        'FollowingConnection', alias='__typename'
+    )
+
+
+class FundingLink(BaseModel):
+    """
+    A funding platform link for a repository.
+    """
+
+    platform: FundingPlatform
+    url: URI
+    typename__: Optional[Literal['FundingLink']] = Field(
+        'FundingLink', alias='__typename'
+    )
+
+
+class GenericHovercardContext(HovercardContext):
+    """
+    A generic hovercard context with a message and icon
+    """
+
+    message: String
+    octicon: String
+    typename__: Optional[Literal['GenericHovercardContext']] = Field(
+        'GenericHovercardContext', alias='__typename'
+    )
+
+
+class Gist(Node, Starrable, UniformResourceLocatable):
+    """
+    A Gist.
+    """
+
+    comments: GistCommentConnection
+    createdAt: DateTime
+    description: Optional[String] = None
+    files: Optional[List[Optional[GistFile]]] = Field(default_factory=list)
+    forks: GistConnection
+    id: ID
+    isFork: Boolean
+    isPublic: Boolean
+    name: String
+    owner: Optional[RepositoryOwner] = None
+    pushedAt: Optional[DateTime] = None
+    resourcePath: URI
+    stargazerCount: Int
+    stargazers: StargazerConnection
+    updatedAt: DateTime
+    url: URI
+    viewerHasStarred: Boolean
+    typename__: Optional[Literal['Gist']] = Field('Gist', alias='__typename')
+
+
+class GistComment(Comment, Deletable, Minimizable, Node, Updatable, UpdatableComment):
+    """
+    Represents a comment on an Gist.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    gist: Gist
+    id: ID
+    includesCreatedEdit: Boolean
+    isMinimized: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    minimizedReason: Optional[String] = None
+    publishedAt: Optional[DateTime] = None
+    updatedAt: DateTime
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['GistComment']] = Field(
+        'GistComment', alias='__typename'
+    )
+
+
+class GistCommentConnection(BaseModel):
+    """
+    The connection type for GistComment.
+    """
+
+    edges: Optional[List[Optional[GistCommentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[GistComment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['GistCommentConnection']] = Field(
+        'GistCommentConnection', alias='__typename'
+    )
+
+
+class GistCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[GistComment] = None
+    typename__: Optional[Literal['GistCommentEdge']] = Field(
+        'GistCommentEdge', alias='__typename'
+    )
+
+
+class GistConnection(BaseModel):
+    """
+    The connection type for Gist.
+    """
+
+    edges: Optional[List[Optional[GistEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Gist]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['GistConnection']] = Field(
+        'GistConnection', alias='__typename'
+    )
+
+
+class GistEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Gist] = None
+    typename__: Optional[Literal['GistEdge']] = Field('GistEdge', alias='__typename')
+
+
+class GistFile(BaseModel):
+    """
+    A file in a gist.
+    """
+
+    encodedName: Optional[String] = None
+    encoding: Optional[String] = None
+    extension: Optional[String] = None
+    isImage: Boolean
+    isTruncated: Boolean
+    language: Optional[Language] = None
+    name: Optional[String] = None
+    size: Optional[Int] = None
+    text: Optional[String] = None
+    typename__: Optional[Literal['GistFile']] = Field('GistFile', alias='__typename')
+
+
+class GitActor(BaseModel):
+    """
+    Represents an actor in a Git commit (ie. an author or committer).
+    """
+
+    avatarUrl: URI
+    date: Optional[GitTimestamp] = None
+    email: Optional[String] = None
+    name: Optional[String] = None
+    user: Optional[User] = None
+    typename__: Optional[Literal['GitActor']] = Field('GitActor', alias='__typename')
+
+
+class GitActorConnection(BaseModel):
+    """
+    The connection type for GitActor.
+    """
+
+    edges: Optional[List[Optional[GitActorEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[GitActor]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['GitActorConnection']] = Field(
+        'GitActorConnection', alias='__typename'
+    )
+
+
+class GitActorEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[GitActor] = None
+    typename__: Optional[Literal['GitActorEdge']] = Field(
+        'GitActorEdge', alias='__typename'
+    )
+
+
+class GitHubMetadata(BaseModel):
+    """
+    Represents information about the GitHub instance.
+    """
+
+    gitHubServicesSha: GitObjectID
+    gitIpAddresses: Optional[List[String]] = Field(default_factory=list)
+    githubEnterpriseImporterIpAddresses: Optional[List[String]] = Field(
+        default_factory=list
+    )
+    hookIpAddresses: Optional[List[String]] = Field(default_factory=list)
+    importerIpAddresses: Optional[List[String]] = Field(default_factory=list)
+    isPasswordAuthenticationVerifiable: Boolean
+    pagesIpAddresses: Optional[List[String]] = Field(default_factory=list)
+    typename__: Optional[Literal['GitHubMetadata']] = Field(
+        'GitHubMetadata', alias='__typename'
+    )
+
+
+class GpgSignature(GitSignature):
+    """
+    Represents a GPG signature on a Commit or Tag.
+    """
+
+    email: String
+    isValid: Boolean
+    keyId: Optional[String] = None
+    payload: String
+    signature: String
+    signer: Optional[User] = None
+    state: GitSignatureState
+    wasSignedByGitHub: Boolean
+    typename__: Optional[Literal['GpgSignature']] = Field(
+        'GpgSignature', alias='__typename'
+    )
+
+
+class GrantEnterpriseOrganizationsMigratorRolePayload(BaseModel):
+    """
+    Autogenerated return type of GrantEnterpriseOrganizationsMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    organizations: Optional[OrganizationConnection] = None
+    typename__: Optional[
+        Literal['GrantEnterpriseOrganizationsMigratorRolePayload']
+    ] = Field('GrantEnterpriseOrganizationsMigratorRolePayload', alias='__typename')
+
+
+class GrantMigratorRolePayload(BaseModel):
+    """
+    Autogenerated return type of GrantMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    success: Optional[Boolean] = None
+    typename__: Optional[Literal['GrantMigratorRolePayload']] = Field(
+        'GrantMigratorRolePayload', alias='__typename'
+    )
+
+
+class HeadRefDeletedEvent(Node):
+    """
+    Represents a 'head_ref_deleted' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    headRef: Optional[Ref] = None
+    headRefName: String
+    id: ID
+    pullRequest: PullRequest
+    typename__: Optional[Literal['HeadRefDeletedEvent']] = Field(
+        'HeadRefDeletedEvent', alias='__typename'
+    )
+
+
+class HeadRefForcePushedEvent(Node):
+    """
+    Represents a 'head_ref_force_pushed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    afterCommit: Optional[Commit] = None
+    beforeCommit: Optional[Commit] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['HeadRefForcePushedEvent']] = Field(
+        'HeadRefForcePushedEvent', alias='__typename'
+    )
+
+
+class HeadRefRestoredEvent(Node):
+    """
+    Represents a 'head_ref_restored' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    typename__: Optional[Literal['HeadRefRestoredEvent']] = Field(
+        'HeadRefRestoredEvent', alias='__typename'
+    )
+
+
+class Hovercard(BaseModel):
+    """
+    Detail needed to display a hovercard for a user
+    """
+
+    contexts: List[HovercardContext]
+    typename__: Optional[Literal['Hovercard']] = Field('Hovercard', alias='__typename')
+
+
+class ImportProjectPayload(BaseModel):
+    """
+    Autogenerated return type of ImportProject
+    """
+
+    clientMutationId: Optional[String] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['ImportProjectPayload']] = Field(
+        'ImportProjectPayload', alias='__typename'
+    )
+
+
+class InviteEnterpriseAdminPayload(BaseModel):
+    """
+    Autogenerated return type of InviteEnterpriseAdmin
+    """
+
+    clientMutationId: Optional[String] = None
+    invitation: Optional[EnterpriseAdministratorInvitation] = None
+    typename__: Optional[Literal['InviteEnterpriseAdminPayload']] = Field(
+        'InviteEnterpriseAdminPayload', alias='__typename'
+    )
+
+
+class IpAllowListEntry(Node):
+    """
+    An IP address or range of addresses that is allowed to access an owner's resources.
+    """
+
+    allowListValue: String
+    createdAt: DateTime
+    id: ID
+    isActive: Boolean
+    name: Optional[String] = None
+    owner: IpAllowListOwner
+    updatedAt: DateTime
+    typename__: Optional[Literal['IpAllowListEntry']] = Field(
+        'IpAllowListEntry', alias='__typename'
+    )
+
+
+class IpAllowListEntryConnection(BaseModel):
+    """
+    The connection type for IpAllowListEntry.
+    """
+
+    edges: Optional[List[Optional[IpAllowListEntryEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[IpAllowListEntry]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['IpAllowListEntryConnection']] = Field(
+        'IpAllowListEntryConnection', alias='__typename'
+    )
+
+
+class IpAllowListEntryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[IpAllowListEntry] = None
+    typename__: Optional[Literal['IpAllowListEntryEdge']] = Field(
+        'IpAllowListEntryEdge', alias='__typename'
+    )
+
+
+class Issue(
+    Assignable,
+    Closable,
+    Comment,
+    Deletable,
+    Labelable,
+    Lockable,
+    Node,
+    ProjectV2Owner,
+    Reactable,
+    RepositoryNode,
+    Subscribable,
+    SubscribableThread,
+    UniformResourceLocatable,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    An Issue is a place to discuss ideas, enhancements, tasks, and bugs for a project.
+    """
+
+    activeLockReason: Optional[LockReason] = None
+    assignees: UserConnection
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyResourcePath: URI
+    bodyText: String
+    bodyUrl: URI
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    comments: IssueCommentConnection
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    fullDatabaseId: Optional[BigInt] = None
+    hovercard: Hovercard
+    id: ID
+    includesCreatedEdit: Boolean
+    isPinned: Optional[Boolean] = None
+    isReadByViewer: Optional[Boolean] = None
+    labels: Optional[LabelConnection] = None
+    lastEditedAt: Optional[DateTime] = None
+    linkedBranches: LinkedBranchConnection
+    locked: Boolean
+    milestone: Optional[Milestone] = None
+    number: Int
+    participants: UserConnection
+    projectCards: ProjectCardConnection
+    projectItems: ProjectV2ItemConnection
+    projectV2: Optional[ProjectV2] = None
+    projectsV2: ProjectV2Connection
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    state: IssueState
+    stateReason: Optional[IssueStateReason] = None
+    timeline: IssueTimelineConnection
+    timelineItems: IssueTimelineItemsConnection
+    title: String
+    titleHTML: String
+    trackedInIssues: IssueConnection
+    trackedIssues: IssueConnection
+    trackedIssuesCount: Int
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanClose: Boolean
+    viewerCanDelete: Boolean
+    viewerCanReact: Boolean
+    viewerCanReopen: Boolean
+    viewerCanSubscribe: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    viewerThreadSubscriptionFormAction: Optional[ThreadSubscriptionFormAction] = None
+    viewerThreadSubscriptionStatus: Optional[ThreadSubscriptionState] = None
+    typename__: Optional[Literal['Issue']] = Field('Issue', alias='__typename')
+
+
+class IssueComment(
+    Comment,
+    Deletable,
+    Minimizable,
+    Node,
+    Reactable,
+    RepositoryNode,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    Represents a comment on an Issue.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    fullDatabaseId: Optional[BigInt] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isMinimized: Boolean
+    issue: Issue
+    lastEditedAt: Optional[DateTime] = None
+    minimizedReason: Optional[String] = None
+    publishedAt: Optional[DateTime] = None
+    pullRequest: Optional[PullRequest] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanReact: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['IssueComment']] = Field(
+        'IssueComment', alias='__typename'
+    )
+
+
+class IssueCommentConnection(BaseModel):
+    """
+    The connection type for IssueComment.
+    """
+
+    edges: Optional[List[Optional[IssueCommentEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[IssueComment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['IssueCommentConnection']] = Field(
+        'IssueCommentConnection', alias='__typename'
+    )
+
+
+class IssueCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[IssueComment] = None
+    typename__: Optional[Literal['IssueCommentEdge']] = Field(
+        'IssueCommentEdge', alias='__typename'
+    )
+
+
+class IssueConnection(BaseModel):
+    """
+    The connection type for Issue.
+    """
+
+    edges: Optional[List[Optional[IssueEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Issue]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['IssueConnection']] = Field(
+        'IssueConnection', alias='__typename'
+    )
+
+
+class IssueContributionsByRepository(BaseModel):
+    """
+    This aggregates issues opened by a user within one repository.
+    """
+
+    contributions: CreatedIssueContributionConnection
+    repository: Repository
+    typename__: Optional[Literal['IssueContributionsByRepository']] = Field(
+        'IssueContributionsByRepository', alias='__typename'
+    )
+
+
+class IssueEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Issue] = None
+    typename__: Optional[Literal['IssueEdge']] = Field('IssueEdge', alias='__typename')
+
+
+class IssueTemplate(BaseModel):
+    """
+    A repository issue template.
+    """
+
+    about: Optional[String] = None
+    assignees: UserConnection
+    body: Optional[String] = None
+    filename: String
+    labels: Optional[LabelConnection] = None
+    name: String
+    title: Optional[String] = None
+    typename__: Optional[Literal['IssueTemplate']] = Field(
+        'IssueTemplate', alias='__typename'
+    )
+
+
+class IssueTimelineConnection(BaseModel):
+    """
+    The connection type for IssueTimelineItem.
+    """
+
+    edges: Optional[List[Optional[IssueTimelineItemEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[IssueTimelineItem]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['IssueTimelineConnection']] = Field(
+        'IssueTimelineConnection', alias='__typename'
+    )
+
+
+class IssueTimelineItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[IssueTimelineItem] = None
+    typename__: Optional[Literal['IssueTimelineItemEdge']] = Field(
+        'IssueTimelineItemEdge', alias='__typename'
+    )
+
+
+class IssueTimelineItemsConnection(BaseModel):
+    """
+    The connection type for IssueTimelineItems.
+    """
+
+    edges: Optional[List[Optional[IssueTimelineItemsEdge]]] = Field(
+        default_factory=list
+    )
+    filteredCount: Int
+    nodes: Optional[List[Optional[IssueTimelineItems]]] = Field(default_factory=list)
+    pageCount: Int
+    pageInfo: PageInfo
+    totalCount: Int
+    updatedAt: DateTime
+    typename__: Optional[Literal['IssueTimelineItemsConnection']] = Field(
+        'IssueTimelineItemsConnection', alias='__typename'
+    )
+
+
+class IssueTimelineItemsEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[IssueTimelineItems] = None
+    typename__: Optional[Literal['IssueTimelineItemsEdge']] = Field(
+        'IssueTimelineItemsEdge', alias='__typename'
+    )
+
+
+class JoinedGitHubContribution(Contribution):
+    """
+    Represents a user signing up for a GitHub account.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['JoinedGitHubContribution']] = Field(
+        'JoinedGitHubContribution', alias='__typename'
+    )
+
+
+class Label(Node):
+    """
+    A label for categorizing Issues, Pull Requests, Milestones, or Discussions with a given Repository.
+    """
+
+    color: String
+    createdAt: Optional[DateTime] = None
+    description: Optional[String] = None
+    id: ID
+    isDefault: Boolean
+    issues: IssueConnection
+    name: String
+    pullRequests: PullRequestConnection
+    repository: Repository
+    resourcePath: URI
+    updatedAt: Optional[DateTime] = None
+    url: URI
+    typename__: Optional[Literal['Label']] = Field('Label', alias='__typename')
+
+
+class LabelConnection(BaseModel):
+    """
+    The connection type for Label.
+    """
+
+    edges: Optional[List[Optional[LabelEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Label]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['LabelConnection']] = Field(
+        'LabelConnection', alias='__typename'
+    )
+
+
+class LabelEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Label] = None
+    typename__: Optional[Literal['LabelEdge']] = Field('LabelEdge', alias='__typename')
+
+
+class LabeledEvent(Node):
+    """
+    Represents a 'labeled' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    label: Label
+    labelable: Labelable
+    typename__: Optional[Literal['LabeledEvent']] = Field(
+        'LabeledEvent', alias='__typename'
+    )
+
+
+class Language(Node):
+    """
+    Represents a given language found in repositories.
+    """
+
+    color: Optional[String] = None
+    id: ID
+    name: String
+    typename__: Optional[Literal['Language']] = Field('Language', alias='__typename')
+
+
+class LanguageConnection(BaseModel):
+    """
+    A list of languages associated with the parent.
+    """
+
+    edges: Optional[List[Optional[LanguageEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Language]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    totalSize: Int
+    typename__: Optional[Literal['LanguageConnection']] = Field(
+        'LanguageConnection', alias='__typename'
+    )
+
+
+class LanguageEdge(BaseModel):
+    """
+    Represents the language of a repository.
+    """
+
+    cursor: String
+    node: Language
+    size: Int
+    typename__: Optional[Literal['LanguageEdge']] = Field(
+        'LanguageEdge', alias='__typename'
+    )
+
+
+class License(Node):
+    """
+    A repository's open source license
+    """
+
+    body: String
+    conditions: List[Optional[LicenseRule]]
+    description: Optional[String] = None
+    featured: Boolean
+    hidden: Boolean
+    id: ID
+    implementation: Optional[String] = None
+    key: String
+    limitations: List[Optional[LicenseRule]]
+    name: String
+    nickname: Optional[String] = None
+    permissions: List[Optional[LicenseRule]]
+    pseudoLicense: Boolean
+    spdxId: Optional[String] = None
+    url: Optional[URI] = None
+    typename__: Optional[Literal['License']] = Field('License', alias='__typename')
+
+
+class LicenseRule(BaseModel):
+    """
+    Describes a License's conditions, permissions, and limitations
+    """
+
+    description: String
+    key: String
+    label: String
+    typename__: Optional[Literal['LicenseRule']] = Field(
+        'LicenseRule', alias='__typename'
+    )
+
+
+class LinkProjectV2ToRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of LinkProjectV2ToRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['LinkProjectV2ToRepositoryPayload']] = Field(
+        'LinkProjectV2ToRepositoryPayload', alias='__typename'
+    )
+
+
+class LinkProjectV2ToTeamPayload(BaseModel):
+    """
+    Autogenerated return type of LinkProjectV2ToTeam
+    """
+
+    clientMutationId: Optional[String] = None
+    team: Optional[Team] = None
+    typename__: Optional[Literal['LinkProjectV2ToTeamPayload']] = Field(
+        'LinkProjectV2ToTeamPayload', alias='__typename'
+    )
+
+
+class LinkRepositoryToProjectPayload(BaseModel):
+    """
+    Autogenerated return type of LinkRepositoryToProject
+    """
+
+    clientMutationId: Optional[String] = None
+    project: Optional[Project] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['LinkRepositoryToProjectPayload']] = Field(
+        'LinkRepositoryToProjectPayload', alias='__typename'
+    )
+
+
+class LinkedBranch(Node):
+    """
+    A branch linked to an issue.
+    """
+
+    id: ID
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['LinkedBranch']] = Field(
+        'LinkedBranch', alias='__typename'
+    )
+
+
+class LinkedBranchConnection(BaseModel):
+    """
+    The connection type for LinkedBranch.
+    """
+
+    edges: Optional[List[Optional[LinkedBranchEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[LinkedBranch]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['LinkedBranchConnection']] = Field(
+        'LinkedBranchConnection', alias='__typename'
+    )
+
+
+class LinkedBranchEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[LinkedBranch] = None
+    typename__: Optional[Literal['LinkedBranchEdge']] = Field(
+        'LinkedBranchEdge', alias='__typename'
+    )
+
+
+class LockLockablePayload(BaseModel):
+    """
+    Autogenerated return type of LockLockable
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    lockedRecord: Optional[Lockable] = None
+    typename__: Optional[Literal['LockLockablePayload']] = Field(
+        'LockLockablePayload', alias='__typename'
+    )
+
+
+class LockedEvent(Node):
+    """
+    Represents a 'locked' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    lockReason: Optional[LockReason] = None
+    lockable: Lockable
+    typename__: Optional[Literal['LockedEvent']] = Field(
+        'LockedEvent', alias='__typename'
+    )
+
+
+class Mannequin(Actor, Node, UniformResourceLocatable):
+    """
+    A placeholder user for attribution of imported data on GitHub.
+    """
+
+    avatarUrl: URI
+    claimant: Optional[User] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    email: Optional[String] = None
+    id: ID
+    login: String
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['Mannequin']] = Field('Mannequin', alias='__typename')
+
+
+class MannequinConnection(BaseModel):
+    """
+    The connection type for Mannequin.
+    """
+
+    edges: Optional[List[Optional[MannequinEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Mannequin]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['MannequinConnection']] = Field(
+        'MannequinConnection', alias='__typename'
+    )
+
+
+class MannequinEdge(BaseModel):
+    """
+    Represents a mannequin.
+    """
+
+    cursor: String
+    node: Optional[Mannequin] = None
+    typename__: Optional[Literal['MannequinEdge']] = Field(
+        'MannequinEdge', alias='__typename'
+    )
+
+
+class MarkDiscussionCommentAsAnswerPayload(BaseModel):
+    """
+    Autogenerated return type of MarkDiscussionCommentAsAnswer
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['MarkDiscussionCommentAsAnswerPayload']] = Field(
+        'MarkDiscussionCommentAsAnswerPayload', alias='__typename'
+    )
+
+
+class MarkFileAsViewedPayload(BaseModel):
+    """
+    Autogenerated return type of MarkFileAsViewed
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['MarkFileAsViewedPayload']] = Field(
+        'MarkFileAsViewedPayload', alias='__typename'
+    )
+
+
+class MarkProjectV2AsTemplatePayload(BaseModel):
+    """
+    Autogenerated return type of MarkProjectV2AsTemplate
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['MarkProjectV2AsTemplatePayload']] = Field(
+        'MarkProjectV2AsTemplatePayload', alias='__typename'
+    )
+
+
+class MarkPullRequestReadyForReviewPayload(BaseModel):
+    """
+    Autogenerated return type of MarkPullRequestReadyForReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['MarkPullRequestReadyForReviewPayload']] = Field(
+        'MarkPullRequestReadyForReviewPayload', alias='__typename'
+    )
+
+
+class MarkedAsDuplicateEvent(Node):
+    """
+    Represents a 'marked_as_duplicate' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    canonical: Optional[IssueOrPullRequest] = None
+    createdAt: DateTime
+    duplicate: Optional[IssueOrPullRequest] = None
+    id: ID
+    isCrossRepository: Boolean
+    typename__: Optional[Literal['MarkedAsDuplicateEvent']] = Field(
+        'MarkedAsDuplicateEvent', alias='__typename'
+    )
+
+
+class MarketplaceCategory(Node):
+    """
+    A public description of a Marketplace category.
+    """
+
+    description: Optional[String] = None
+    howItWorks: Optional[String] = None
+    id: ID
+    name: String
+    primaryListingCount: Int
+    resourcePath: URI
+    secondaryListingCount: Int
+    slug: String
+    url: URI
+    typename__: Optional[Literal['MarketplaceCategory']] = Field(
+        'MarketplaceCategory', alias='__typename'
+    )
+
+
+class MarketplaceListing(Node):
+    """
+    A listing in the GitHub integration marketplace.
+    """
+
+    app: Optional[App] = None
+    companyUrl: Optional[URI] = None
+    configurationResourcePath: URI
+    configurationUrl: URI
+    documentationUrl: Optional[URI] = None
+    extendedDescription: Optional[String] = None
+    extendedDescriptionHTML: HTML
+    fullDescription: String
+    fullDescriptionHTML: HTML
+    hasPublishedFreeTrialPlans: Boolean
+    hasTermsOfService: Boolean
+    hasVerifiedOwner: Boolean
+    howItWorks: Optional[String] = None
+    howItWorksHTML: HTML
+    id: ID
+    installationUrl: Optional[URI] = None
+    installedForViewer: Boolean
+    isArchived: Boolean
+    isDraft: Boolean
+    isPaid: Boolean
+    isPublic: Boolean
+    isRejected: Boolean
+    isUnverified: Boolean
+    isUnverifiedPending: Boolean
+    isVerificationPendingFromDraft: Boolean
+    isVerificationPendingFromUnverified: Boolean
+    isVerified: Boolean
+    logoBackgroundColor: String
+    logoUrl: Optional[URI] = None
+    name: String
+    normalizedShortDescription: String
+    pricingUrl: Optional[URI] = None
+    primaryCategory: MarketplaceCategory
+    privacyPolicyUrl: URI
+    resourcePath: URI
+    screenshotUrls: List[Optional[String]]
+    secondaryCategory: Optional[MarketplaceCategory] = None
+    shortDescription: String
+    slug: String
+    statusUrl: Optional[URI] = None
+    supportEmail: Optional[String] = None
+    supportUrl: URI
+    termsOfServiceUrl: Optional[URI] = None
+    url: URI
+    viewerCanAddPlans: Boolean
+    viewerCanApprove: Boolean
+    viewerCanDelist: Boolean
+    viewerCanEdit: Boolean
+    viewerCanEditCategories: Boolean
+    viewerCanEditPlans: Boolean
+    viewerCanRedraft: Boolean
+    viewerCanReject: Boolean
+    viewerCanRequestApproval: Boolean
+    viewerHasPurchased: Boolean
+    viewerHasPurchasedForAllOrganizations: Boolean
+    viewerIsListingAdmin: Boolean
+    typename__: Optional[Literal['MarketplaceListing']] = Field(
+        'MarketplaceListing', alias='__typename'
+    )
+
+
+class MarketplaceListingConnection(BaseModel):
+    """
+    Look up Marketplace Listings
+    """
+
+    edges: Optional[List[Optional[MarketplaceListingEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[MarketplaceListing]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['MarketplaceListingConnection']] = Field(
+        'MarketplaceListingConnection', alias='__typename'
+    )
+
+
+class MarketplaceListingEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[MarketplaceListing] = None
+    typename__: Optional[Literal['MarketplaceListingEdge']] = Field(
+        'MarketplaceListingEdge', alias='__typename'
+    )
+
+
+class MemberFeatureRequestNotification(Node):
+    """
+    Represents a member feature request notification
+    """
+
+    body: String
+    id: ID
+    title: String
+    updatedAt: DateTime
+    typename__: Optional[Literal['MemberFeatureRequestNotification']] = Field(
+        'MemberFeatureRequestNotification', alias='__typename'
+    )
+
+
+class MembersCanDeleteReposClearAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a members_can_delete_repos.clear event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['MembersCanDeleteReposClearAuditEntry']] = Field(
+        'MembersCanDeleteReposClearAuditEntry', alias='__typename'
+    )
+
+
+class MembersCanDeleteReposDisableAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a members_can_delete_repos.disable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['MembersCanDeleteReposDisableAuditEntry']] = Field(
+        'MembersCanDeleteReposDisableAuditEntry', alias='__typename'
+    )
+
+
+class MembersCanDeleteReposEnableAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a members_can_delete_repos.enable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['MembersCanDeleteReposEnableAuditEntry']] = Field(
+        'MembersCanDeleteReposEnableAuditEntry', alias='__typename'
+    )
+
+
+class MentionedEvent(Node):
+    """
+    Represents a 'mentioned' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    typename__: Optional[Literal['MentionedEvent']] = Field(
+        'MentionedEvent', alias='__typename'
+    )
+
+
+class MergeBranchPayload(BaseModel):
+    """
+    Autogenerated return type of MergeBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    mergeCommit: Optional[Commit] = None
+    typename__: Optional[Literal['MergeBranchPayload']] = Field(
+        'MergeBranchPayload', alias='__typename'
+    )
+
+
+class MergePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of MergePullRequest
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['MergePullRequestPayload']] = Field(
+        'MergePullRequestPayload', alias='__typename'
+    )
+
+
+class MergeQueue(Node):
+    """
+    The queue of pull request entries to be merged into a protected branch in a repository.
+    """
+
+    configuration: Optional[MergeQueueConfiguration] = None
+    entries: Optional[MergeQueueEntryConnection] = None
+    id: ID
+    nextEntryEstimatedTimeToMerge: Optional[Int] = None
+    repository: Optional[Repository] = None
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['MergeQueue']] = Field(
+        'MergeQueue', alias='__typename'
+    )
+
+
+class MergeQueueConfiguration(BaseModel):
+    """
+    Configuration for a MergeQueue
+    """
+
+    checkResponseTimeout: Optional[Int] = None
+    maximumEntriesToBuild: Optional[Int] = None
+    maximumEntriesToMerge: Optional[Int] = None
+    mergeMethod: Optional[PullRequestMergeMethod] = None
+    mergingStrategy: Optional[MergeQueueMergingStrategy] = None
+    minimumEntriesToMerge: Optional[Int] = None
+    minimumEntriesToMergeWaitTime: Optional[Int] = None
+    typename__: Optional[Literal['MergeQueueConfiguration']] = Field(
+        'MergeQueueConfiguration', alias='__typename'
+    )
+
+
+class MergeQueueEntry(Node):
+    """
+    Entries in a MergeQueue
+    """
+
+    baseCommit: Optional[Commit] = None
+    enqueuedAt: DateTime
+    enqueuer: Actor
+    estimatedTimeToMerge: Optional[Int] = None
+    headCommit: Optional[Commit] = None
+    id: ID
+    jump: Boolean
+    mergeQueue: Optional[MergeQueue] = None
+    position: Int
+    pullRequest: Optional[PullRequest] = None
+    solo: Boolean
+    state: MergeQueueEntryState
+    typename__: Optional[Literal['MergeQueueEntry']] = Field(
+        'MergeQueueEntry', alias='__typename'
+    )
+
+
+class MergeQueueEntryConnection(BaseModel):
+    """
+    The connection type for MergeQueueEntry.
+    """
+
+    edges: Optional[List[Optional[MergeQueueEntryEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[MergeQueueEntry]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['MergeQueueEntryConnection']] = Field(
+        'MergeQueueEntryConnection', alias='__typename'
+    )
+
+
+class MergeQueueEntryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[MergeQueueEntry] = None
+    typename__: Optional[Literal['MergeQueueEntryEdge']] = Field(
+        'MergeQueueEntryEdge', alias='__typename'
+    )
+
+
+class MergedEvent(Node, UniformResourceLocatable):
+    """
+    Represents a 'merged' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    commit: Optional[Commit] = None
+    createdAt: DateTime
+    id: ID
+    mergeRef: Optional[Ref] = None
+    mergeRefName: String
+    pullRequest: PullRequest
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['MergedEvent']] = Field(
+        'MergedEvent', alias='__typename'
+    )
+
+
+class MigrationSource(Node):
+    """
+    A GitHub Enterprise Importer (GEI) migration source.
+    """
+
+    id: ID
+    name: String
+    type: MigrationSourceType
+    url: URI
+    typename__: Optional[Literal['MigrationSource']] = Field(
+        'MigrationSource', alias='__typename'
+    )
+
+
+class Milestone(Closable, Node, UniformResourceLocatable):
+    """
+    Represents a Milestone object on a given repository.
+    """
+
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    description: Optional[String] = None
+    dueOn: Optional[DateTime] = None
+    id: ID
+    issues: IssueConnection
+    number: Int
+    progressPercentage: Float
+    pullRequests: PullRequestConnection
+    repository: Repository
+    resourcePath: URI
+    state: MilestoneState
+    title: String
+    updatedAt: DateTime
+    url: URI
+    viewerCanClose: Boolean
+    viewerCanReopen: Boolean
+    typename__: Optional[Literal['Milestone']] = Field('Milestone', alias='__typename')
+
+
+class MilestoneConnection(BaseModel):
+    """
+    The connection type for Milestone.
+    """
+
+    edges: Optional[List[Optional[MilestoneEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Milestone]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['MilestoneConnection']] = Field(
+        'MilestoneConnection', alias='__typename'
+    )
+
+
+class MilestoneEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Milestone] = None
+    typename__: Optional[Literal['MilestoneEdge']] = Field(
+        'MilestoneEdge', alias='__typename'
+    )
+
+
+class MilestonedEvent(Node):
+    """
+    Represents a 'milestoned' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    milestoneTitle: String
+    subject: MilestoneItem
+    typename__: Optional[Literal['MilestonedEvent']] = Field(
+        'MilestonedEvent', alias='__typename'
+    )
+
+
+class MinimizeCommentPayload(BaseModel):
+    """
+    Autogenerated return type of MinimizeComment
+    """
+
+    clientMutationId: Optional[String] = None
+    minimizedComment: Optional[Minimizable] = None
+    typename__: Optional[Literal['MinimizeCommentPayload']] = Field(
+        'MinimizeCommentPayload', alias='__typename'
+    )
+
+
+class MoveProjectCardPayload(BaseModel):
+    """
+    Autogenerated return type of MoveProjectCard
+    """
+
+    cardEdge: Optional[ProjectCardEdge] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['MoveProjectCardPayload']] = Field(
+        'MoveProjectCardPayload', alias='__typename'
+    )
+
+
+class MoveProjectColumnPayload(BaseModel):
+    """
+    Autogenerated return type of MoveProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    columnEdge: Optional[ProjectColumnEdge] = None
+    typename__: Optional[Literal['MoveProjectColumnPayload']] = Field(
+        'MoveProjectColumnPayload', alias='__typename'
+    )
+
+
+class MovedColumnsInProjectEvent(Node):
+    """
+    Represents a 'moved_columns_in_project' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    previousProjectColumnName: String
+    project: Optional[Project] = None
+    projectCard: Optional[ProjectCard] = None
+    projectColumnName: String
+    typename__: Optional[Literal['MovedColumnsInProjectEvent']] = Field(
+        'MovedColumnsInProjectEvent', alias='__typename'
+    )
+
+
+class OIDCProvider(Node):
+    """
+    An OIDC identity provider configured to provision identities for an enterprise.
+    Visible to enterprise owners or enterprise owners' personal access tokens
+    (classic) with read:enterprise or admin:enterprise scope.
+    """
+
+    enterprise: Optional[Enterprise] = None
+    externalIdentities: ExternalIdentityConnection
+    id: ID
+    providerType: OIDCProviderType
+    tenantId: String
+    typename__: Optional[Literal['OIDCProvider']] = Field(
+        'OIDCProvider', alias='__typename'
+    )
+
+
+class OauthApplicationCreateAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a oauth_application.create event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    applicationUrl: Optional[URI] = None
+    callbackUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    rateLimit: Optional[Int] = None
+    state: Optional[OauthApplicationCreateAuditEntryState] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OauthApplicationCreateAuditEntry']] = Field(
+        'OauthApplicationCreateAuditEntry', alias='__typename'
+    )
+
+
+class OrgAddBillingManagerAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.add_billing_manager
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    invitationEmail: Optional[String] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgAddBillingManagerAuditEntry']] = Field(
+        'OrgAddBillingManagerAuditEntry', alias='__typename'
+    )
+
+
+class OrgAddMemberAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.add_member
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    permission: Optional[OrgAddMemberAuditEntryPermission] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgAddMemberAuditEntry']] = Field(
+        'OrgAddMemberAuditEntry', alias='__typename'
+    )
+
+
+class OrgBlockUserAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.block_user
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    blockedUser: Optional[User] = None
+    blockedUserName: Optional[String] = None
+    blockedUserResourcePath: Optional[URI] = None
+    blockedUserUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgBlockUserAuditEntry']] = Field(
+        'OrgBlockUserAuditEntry', alias='__typename'
+    )
+
+
+class OrgConfigDisableCollaboratorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.config.disable_collaborators_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgConfigDisableCollaboratorsOnlyAuditEntry']
+    ] = Field('OrgConfigDisableCollaboratorsOnlyAuditEntry', alias='__typename')
+
+
+class OrgConfigEnableCollaboratorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.config.enable_collaborators_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgConfigEnableCollaboratorsOnlyAuditEntry']] = Field(
+        'OrgConfigEnableCollaboratorsOnlyAuditEntry', alias='__typename'
+    )
+
+
+class OrgCreateAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.create event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    billingPlan: Optional[OrgCreateAuditEntryBillingPlan] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgCreateAuditEntry']] = Field(
+        'OrgCreateAuditEntry', alias='__typename'
+    )
+
+
+class OrgDisableOauthAppRestrictionsAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.disable_oauth_app_restrictions event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgDisableOauthAppRestrictionsAuditEntry']] = Field(
+        'OrgDisableOauthAppRestrictionsAuditEntry', alias='__typename'
+    )
+
+
+class OrgDisableSamlAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.disable_saml event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    digestMethodUrl: Optional[URI] = None
+    id: ID
+    issuerUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    signatureMethodUrl: Optional[URI] = None
+    singleSignOnUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgDisableSamlAuditEntry']] = Field(
+        'OrgDisableSamlAuditEntry', alias='__typename'
+    )
+
+
+class OrgDisableTwoFactorRequirementAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.disable_two_factor_requirement event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgDisableTwoFactorRequirementAuditEntry']] = Field(
+        'OrgDisableTwoFactorRequirementAuditEntry', alias='__typename'
+    )
+
+
+class OrgEnableOauthAppRestrictionsAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.enable_oauth_app_restrictions event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgEnableOauthAppRestrictionsAuditEntry']] = Field(
+        'OrgEnableOauthAppRestrictionsAuditEntry', alias='__typename'
+    )
+
+
+class OrgEnableSamlAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.enable_saml event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    digestMethodUrl: Optional[URI] = None
+    id: ID
+    issuerUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    signatureMethodUrl: Optional[URI] = None
+    singleSignOnUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgEnableSamlAuditEntry']] = Field(
+        'OrgEnableSamlAuditEntry', alias='__typename'
+    )
+
+
+class OrgEnableTwoFactorRequirementAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.enable_two_factor_requirement event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgEnableTwoFactorRequirementAuditEntry']] = Field(
+        'OrgEnableTwoFactorRequirementAuditEntry', alias='__typename'
+    )
+
+
+class OrgInviteMemberAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.invite_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    email: Optional[String] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationInvitation: Optional[OrganizationInvitation] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgInviteMemberAuditEntry']] = Field(
+        'OrgInviteMemberAuditEntry', alias='__typename'
+    )
+
+
+class OrgInviteToBusinessAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.invite_to_business event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgInviteToBusinessAuditEntry']] = Field(
+        'OrgInviteToBusinessAuditEntry', alias='__typename'
+    )
+
+
+class OrgOauthAppAccessApprovedAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.oauth_app_access_approved event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgOauthAppAccessApprovedAuditEntry']] = Field(
+        'OrgOauthAppAccessApprovedAuditEntry', alias='__typename'
+    )
+
+
+class OrgOauthAppAccessBlockedAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.oauth_app_access_blocked event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgOauthAppAccessBlockedAuditEntry']] = Field(
+        'OrgOauthAppAccessBlockedAuditEntry', alias='__typename'
+    )
+
+
+class OrgOauthAppAccessDeniedAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.oauth_app_access_denied event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgOauthAppAccessDeniedAuditEntry']] = Field(
+        'OrgOauthAppAccessDeniedAuditEntry', alias='__typename'
+    )
+
+
+class OrgOauthAppAccessRequestedAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.oauth_app_access_requested event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgOauthAppAccessRequestedAuditEntry']] = Field(
+        'OrgOauthAppAccessRequestedAuditEntry', alias='__typename'
+    )
+
+
+class OrgOauthAppAccessUnblockedAuditEntry(
+    AuditEntry, Node, OauthApplicationAuditEntryData, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.oauth_app_access_unblocked event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    oauthApplicationName: Optional[String] = None
+    oauthApplicationResourcePath: Optional[URI] = None
+    oauthApplicationUrl: Optional[URI] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgOauthAppAccessUnblockedAuditEntry']] = Field(
+        'OrgOauthAppAccessUnblockedAuditEntry', alias='__typename'
+    )
+
+
+class OrgRemoveBillingManagerAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.remove_billing_manager event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    reason: Optional[OrgRemoveBillingManagerAuditEntryReason] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgRemoveBillingManagerAuditEntry']] = Field(
+        'OrgRemoveBillingManagerAuditEntry', alias='__typename'
+    )
+
+
+class OrgRemoveMemberAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.remove_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    membershipTypes: Optional[List[OrgRemoveMemberAuditEntryMembershipType]] = Field(
+        default_factory=list
+    )
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    reason: Optional[OrgRemoveMemberAuditEntryReason] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgRemoveMemberAuditEntry']] = Field(
+        'OrgRemoveMemberAuditEntry', alias='__typename'
+    )
+
+
+class OrgRemoveOutsideCollaboratorAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.remove_outside_collaborator event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    membershipTypes: Optional[
+        List[OrgRemoveOutsideCollaboratorAuditEntryMembershipType]
+    ] = Field(default_factory=list)
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    reason: Optional[OrgRemoveOutsideCollaboratorAuditEntryReason] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgRemoveOutsideCollaboratorAuditEntry']] = Field(
+        'OrgRemoveOutsideCollaboratorAuditEntry', alias='__typename'
+    )
+
+
+class OrgRestoreMemberAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.restore_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    restoredCustomEmailRoutingsCount: Optional[Int] = None
+    restoredIssueAssignmentsCount: Optional[Int] = None
+    restoredMemberships: Optional[List[OrgRestoreMemberAuditEntryMembership]] = Field(
+        default_factory=list
+    )
+    restoredMembershipsCount: Optional[Int] = None
+    restoredRepositoriesCount: Optional[Int] = None
+    restoredRepositoryStarsCount: Optional[Int] = None
+    restoredRepositoryWatchesCount: Optional[Int] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgRestoreMemberAuditEntry']] = Field(
+        'OrgRestoreMemberAuditEntry', alias='__typename'
+    )
+
+
+class OrgRestoreMemberMembershipOrganizationAuditEntryData(OrganizationAuditEntryData):
+    """
+    Metadata for an organization membership for org.restore_member actions
+    """
+
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgRestoreMemberMembershipOrganizationAuditEntryData']
+    ] = Field(
+        'OrgRestoreMemberMembershipOrganizationAuditEntryData', alias='__typename'
+    )
+
+
+class OrgRestoreMemberMembershipRepositoryAuditEntryData(RepositoryAuditEntryData):
+    """
+    Metadata for a repository membership for org.restore_member actions
+    """
+
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgRestoreMemberMembershipRepositoryAuditEntryData']
+    ] = Field('OrgRestoreMemberMembershipRepositoryAuditEntryData', alias='__typename')
+
+
+class OrgRestoreMemberMembershipTeamAuditEntryData(TeamAuditEntryData):
+    """
+    Metadata for a team membership for org.restore_member actions
+    """
+
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgRestoreMemberMembershipTeamAuditEntryData']
+    ] = Field('OrgRestoreMemberMembershipTeamAuditEntryData', alias='__typename')
+
+
+class OrgUnblockUserAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.unblock_user
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    blockedUser: Optional[User] = None
+    blockedUserName: Optional[String] = None
+    blockedUserResourcePath: Optional[URI] = None
+    blockedUserUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgUnblockUserAuditEntry']] = Field(
+        'OrgUnblockUserAuditEntry', alias='__typename'
+    )
+
+
+class OrgUpdateDefaultRepositoryPermissionAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.update_default_repository_permission
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    permission: Optional[
+        OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+    ] = None
+    permissionWas: Optional[
+        OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+    ] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgUpdateDefaultRepositoryPermissionAuditEntry']
+    ] = Field('OrgUpdateDefaultRepositoryPermissionAuditEntry', alias='__typename')
+
+
+class OrgUpdateMemberAuditEntry(AuditEntry, Node, OrganizationAuditEntryData):
+    """
+    Audit log entry for a org.update_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    permission: Optional[OrgUpdateMemberAuditEntryPermission] = None
+    permissionWas: Optional[OrgUpdateMemberAuditEntryPermission] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrgUpdateMemberAuditEntry']] = Field(
+        'OrgUpdateMemberAuditEntry', alias='__typename'
+    )
+
+
+class OrgUpdateMemberRepositoryCreationPermissionAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.update_member_repository_creation_permission event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    canCreateRepositories: Optional[Boolean] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[
+        OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility
+    ] = None
+    typename__: Optional[
+        Literal['OrgUpdateMemberRepositoryCreationPermissionAuditEntry']
+    ] = Field(
+        'OrgUpdateMemberRepositoryCreationPermissionAuditEntry', alias='__typename'
+    )
+
+
+class OrgUpdateMemberRepositoryInvitationPermissionAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a org.update_member_repository_invitation_permission event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    canInviteOutsideCollaboratorsToRepositories: Optional[Boolean] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['OrgUpdateMemberRepositoryInvitationPermissionAuditEntry']
+    ] = Field(
+        'OrgUpdateMemberRepositoryInvitationPermissionAuditEntry', alias='__typename'
+    )
+
+
+class Organization(
+    Actor,
+    AnnouncementBanner,
+    MemberStatusable,
+    Node,
+    PackageOwner,
+    ProfileOwner,
+    ProjectOwner,
+    ProjectV2Owner,
+    ProjectV2Recent,
+    RepositoryDiscussionAuthor,
+    RepositoryDiscussionCommentAuthor,
+    RepositoryOwner,
+    Sponsorable,
+    UniformResourceLocatable,
+):
+    """
+    An account on GitHub, with one or more owners, that has repositories, members and teams.
+    """
+
+    announcement: Optional[String] = None
+    announcementExpiresAt: Optional[DateTime] = None
+    announcementUserDismissible: Optional[Boolean] = None
+    anyPinnableItems: Boolean
+    archivedAt: Optional[DateTime] = None
+    auditLog: OrganizationAuditEntryConnection
+    avatarUrl: URI
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    descriptionHTML: Optional[String] = None
+    domains: Optional[VerifiableDomainConnection] = None
+    email: Optional[String] = None
+    enterpriseOwners: OrganizationEnterpriseOwnerConnection
+    estimatedNextSponsorsPayoutInCents: Int
+    hasSponsorsListing: Boolean
+    id: ID
+    interactionAbility: Optional[RepositoryInteractionAbility] = None
+    ipAllowListEnabledSetting: IpAllowListEnabledSettingValue
+    ipAllowListEntries: IpAllowListEntryConnection
+    ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue
+    isSponsoredBy: Boolean
+    isSponsoringViewer: Boolean
+    isVerified: Boolean
+    itemShowcase: ProfileItemShowcase
+    location: Optional[String] = None
+    login: String
+    mannequins: MannequinConnection
+    memberStatuses: UserStatusConnection
+    membersCanForkPrivateRepositories: Boolean
+    membersWithRole: OrganizationMemberConnection
+    monthlyEstimatedSponsorsIncomeInCents: Int
+    name: Optional[String] = None
+    newTeamResourcePath: URI
+    newTeamUrl: URI
+    notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue
+    organizationBillingEmail: Optional[String] = None
+    packages: PackageConnection
+    pendingMembers: UserConnection
+    pinnableItems: PinnableItemConnection
+    pinnedItems: PinnableItemConnection
+    pinnedItemsRemaining: Int
+    project: Optional[Project] = None
+    projectV2: Optional[ProjectV2] = None
+    projects: ProjectConnection
+    projectsResourcePath: URI
+    projectsUrl: URI
+    projectsV2: ProjectV2Connection
+    recentProjects: ProjectV2Connection
+    repositories: RepositoryConnection
+    repository: Optional[Repository] = None
+    repositoryDiscussionComments: DiscussionCommentConnection
+    repositoryDiscussions: DiscussionConnection
+    repositoryMigrations: RepositoryMigrationConnection
+    requiresTwoFactorAuthentication: Optional[Boolean] = None
+    resourcePath: URI
+    ruleset: Optional[RepositoryRuleset] = None
+    rulesets: Optional[RepositoryRulesetConnection] = None
+    samlIdentityProvider: Optional[OrganizationIdentityProvider] = None
+    sponsoring: SponsorConnection
+    sponsors: SponsorConnection
+    sponsorsActivities: SponsorsActivityConnection
+    sponsorsListing: Optional[SponsorsListing] = None
+    sponsorshipForViewerAsSponsor: Optional[Sponsorship] = None
+    sponsorshipForViewerAsSponsorable: Optional[Sponsorship] = None
+    sponsorshipNewsletters: SponsorshipNewsletterConnection
+    sponsorshipsAsMaintainer: SponsorshipConnection
+    sponsorshipsAsSponsor: SponsorshipConnection
+    team: Optional[Team] = None
+    teams: TeamConnection
+    teamsResourcePath: URI
+    teamsUrl: URI
+    totalSponsorshipAmountAsSponsorInCents: Optional[Int] = None
+    twitterUsername: Optional[String] = None
+    updatedAt: DateTime
+    url: URI
+    viewerCanAdminister: Boolean
+    viewerCanChangePinnedItems: Boolean
+    viewerCanCreateProjects: Boolean
+    viewerCanCreateRepositories: Boolean
+    viewerCanCreateTeams: Boolean
+    viewerCanSponsor: Boolean
+    viewerIsAMember: Boolean
+    viewerIsFollowing: Boolean
+    viewerIsSponsoring: Boolean
+    webCommitSignoffRequired: Boolean
+    websiteUrl: Optional[URI] = None
+    typename__: Optional[Literal['Organization']] = Field(
+        'Organization', alias='__typename'
+    )
+
+
+class OrganizationAuditEntryConnection(BaseModel):
+    """
+    The connection type for OrganizationAuditEntry.
+    """
+
+    edges: Optional[List[Optional[OrganizationAuditEntryEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[OrganizationAuditEntry]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['OrganizationAuditEntryConnection']] = Field(
+        'OrganizationAuditEntryConnection', alias='__typename'
+    )
+
+
+class OrganizationAuditEntryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[OrganizationAuditEntry] = None
+    typename__: Optional[Literal['OrganizationAuditEntryEdge']] = Field(
+        'OrganizationAuditEntryEdge', alias='__typename'
+    )
+
+
+class OrganizationConnection(BaseModel):
+    """
+    A list of organizations managed by an enterprise.
+    """
+
+    edges: Optional[List[Optional[OrganizationEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Organization]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['OrganizationConnection']] = Field(
+        'OrganizationConnection', alias='__typename'
+    )
+
+
+class OrganizationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Organization] = None
+    typename__: Optional[Literal['OrganizationEdge']] = Field(
+        'OrganizationEdge', alias='__typename'
+    )
+
+
+class OrganizationEnterpriseOwnerConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[OrganizationEnterpriseOwnerEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['OrganizationEnterpriseOwnerConnection']] = Field(
+        'OrganizationEnterpriseOwnerConnection', alias='__typename'
+    )
+
+
+class OrganizationEnterpriseOwnerEdge(BaseModel):
+    """
+    An enterprise owner in the context of an organization that is part of the enterprise.
+    """
+
+    cursor: String
+    node: Optional[User] = None
+    organizationRole: RoleInOrganization
+    typename__: Optional[Literal['OrganizationEnterpriseOwnerEdge']] = Field(
+        'OrganizationEnterpriseOwnerEdge', alias='__typename'
+    )
+
+
+class OrganizationIdentityProvider(Node):
+    """
+    An Identity Provider configured to provision SAML and SCIM identities for
+    Organizations. Visible to (1) organization owners, (2) organization owners'
+    personal access tokens (classic) with read:org or admin:org scope, (3) GitHub
+    App with an installation token with read or write access to members.
+    """
+
+    digestMethod: Optional[URI] = None
+    externalIdentities: ExternalIdentityConnection
+    id: ID
+    idpCertificate: Optional[X509Certificate] = None
+    issuer: Optional[String] = None
+    organization: Optional[Organization] = None
+    signatureMethod: Optional[URI] = None
+    ssoUrl: Optional[URI] = None
+    typename__: Optional[Literal['OrganizationIdentityProvider']] = Field(
+        'OrganizationIdentityProvider', alias='__typename'
+    )
+
+
+class OrganizationInvitation(Node):
+    """
+    An Invitation for a user to an organization.
+    """
+
+    createdAt: DateTime
+    email: Optional[String] = None
+    id: ID
+    invitationSource: OrganizationInvitationSource
+    invitationType: OrganizationInvitationType
+    invitee: Optional[User] = None
+    inviter: User
+    organization: Organization
+    role: OrganizationInvitationRole
+    typename__: Optional[Literal['OrganizationInvitation']] = Field(
+        'OrganizationInvitation', alias='__typename'
+    )
+
+
+class OrganizationInvitationConnection(BaseModel):
+    """
+    The connection type for OrganizationInvitation.
+    """
+
+    edges: Optional[List[Optional[OrganizationInvitationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[OrganizationInvitation]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['OrganizationInvitationConnection']] = Field(
+        'OrganizationInvitationConnection', alias='__typename'
+    )
+
+
+class OrganizationInvitationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[OrganizationInvitation] = None
+    typename__: Optional[Literal['OrganizationInvitationEdge']] = Field(
+        'OrganizationInvitationEdge', alias='__typename'
+    )
+
+
+class OrganizationMemberConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[OrganizationMemberEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['OrganizationMemberConnection']] = Field(
+        'OrganizationMemberConnection', alias='__typename'
+    )
+
+
+class OrganizationMemberEdge(BaseModel):
+    """
+    Represents a user within an organization.
+    """
+
+    cursor: String
+    hasTwoFactorEnabled: Optional[Boolean] = None
+    node: Optional[User] = None
+    role: Optional[OrganizationMemberRole] = None
+    typename__: Optional[Literal['OrganizationMemberEdge']] = Field(
+        'OrganizationMemberEdge', alias='__typename'
+    )
+
+
+class OrganizationMigration(Node):
+    """
+    A GitHub Enterprise Importer (GEI) organization migration.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[String] = None
+    failureReason: Optional[String] = None
+    id: ID
+    remainingRepositoriesCount: Optional[Int] = None
+    sourceOrgName: String
+    sourceOrgUrl: URI
+    state: OrganizationMigrationState
+    targetOrgName: String
+    totalRepositoriesCount: Optional[Int] = None
+    typename__: Optional[Literal['OrganizationMigration']] = Field(
+        'OrganizationMigration', alias='__typename'
+    )
+
+
+class OrganizationTeamsHovercardContext(HovercardContext):
+    """
+    An organization teams hovercard context
+    """
+
+    message: String
+    octicon: String
+    relevantTeams: TeamConnection
+    teamsResourcePath: URI
+    teamsUrl: URI
+    totalTeamCount: Int
+    typename__: Optional[Literal['OrganizationTeamsHovercardContext']] = Field(
+        'OrganizationTeamsHovercardContext', alias='__typename'
+    )
+
+
+class OrganizationsHovercardContext(HovercardContext):
+    """
+    An organization list hovercard context
+    """
+
+    message: String
+    octicon: String
+    relevantOrganizations: OrganizationConnection
+    totalOrganizationCount: Int
+    typename__: Optional[Literal['OrganizationsHovercardContext']] = Field(
+        'OrganizationsHovercardContext', alias='__typename'
+    )
+
+
+class Package(Node):
+    """
+    Information for an uploaded package.
+    """
+
+    id: ID
+    latestVersion: Optional[PackageVersion] = None
+    name: String
+    packageType: PackageType
+    repository: Optional[Repository] = None
+    statistics: Optional[PackageStatistics] = None
+    version: Optional[PackageVersion] = None
+    versions: PackageVersionConnection
+    typename__: Optional[Literal['Package']] = Field('Package', alias='__typename')
+
+
+class PackageConnection(BaseModel):
+    """
+    The connection type for Package.
+    """
+
+    edges: Optional[List[Optional[PackageEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Package]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PackageConnection']] = Field(
+        'PackageConnection', alias='__typename'
+    )
+
+
+class PackageEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Package] = None
+    typename__: Optional[Literal['PackageEdge']] = Field(
+        'PackageEdge', alias='__typename'
+    )
+
+
+class PackageFile(Node):
+    """
+    A file in a package version.
+    """
+
+    id: ID
+    md5: Optional[String] = None
+    name: String
+    packageVersion: Optional[PackageVersion] = None
+    sha1: Optional[String] = None
+    sha256: Optional[String] = None
+    size: Optional[Int] = None
+    updatedAt: DateTime
+    url: Optional[URI] = None
+    typename__: Optional[Literal['PackageFile']] = Field(
+        'PackageFile', alias='__typename'
+    )
+
+
+class PackageFileConnection(BaseModel):
+    """
+    The connection type for PackageFile.
+    """
+
+    edges: Optional[List[Optional[PackageFileEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PackageFile]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PackageFileConnection']] = Field(
+        'PackageFileConnection', alias='__typename'
+    )
+
+
+class PackageFileEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PackageFile] = None
+    typename__: Optional[Literal['PackageFileEdge']] = Field(
+        'PackageFileEdge', alias='__typename'
+    )
+
+
+class PackageStatistics(BaseModel):
+    """
+    Represents a object that contains package activity statistics such as downloads.
+    """
+
+    downloadsTotalCount: Int
+    typename__: Optional[Literal['PackageStatistics']] = Field(
+        'PackageStatistics', alias='__typename'
+    )
+
+
+class PackageTag(Node):
+    """
+    A version tag contains the mapping between a tag name and a version.
+    """
+
+    id: ID
+    name: String
+    version: Optional[PackageVersion] = None
+    typename__: Optional[Literal['PackageTag']] = Field(
+        'PackageTag', alias='__typename'
+    )
+
+
+class PackageVersion(Node):
+    """
+    Information about a specific package version.
+    """
+
+    files: PackageFileConnection
+    id: ID
+    package: Optional[Package] = None
+    platform: Optional[String] = None
+    preRelease: Boolean
+    readme: Optional[String] = None
+    release: Optional[Release] = None
+    statistics: Optional[PackageVersionStatistics] = None
+    summary: Optional[String] = None
+    version: String
+    typename__: Optional[Literal['PackageVersion']] = Field(
+        'PackageVersion', alias='__typename'
+    )
+
+
+class PackageVersionConnection(BaseModel):
+    """
+    The connection type for PackageVersion.
+    """
+
+    edges: Optional[List[Optional[PackageVersionEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PackageVersion]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PackageVersionConnection']] = Field(
+        'PackageVersionConnection', alias='__typename'
+    )
+
+
+class PackageVersionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PackageVersion] = None
+    typename__: Optional[Literal['PackageVersionEdge']] = Field(
+        'PackageVersionEdge', alias='__typename'
+    )
+
+
+class PackageVersionStatistics(BaseModel):
+    """
+    Represents a object that contains package version activity statistics such as downloads.
+    """
+
+    downloadsTotalCount: Int
+    typename__: Optional[Literal['PackageVersionStatistics']] = Field(
+        'PackageVersionStatistics', alias='__typename'
+    )
+
+
+class PageInfo(BaseModel):
+    """
+    Information about pagination in a connection.
+    """
+
+    endCursor: Optional[String] = None
+    hasNextPage: Boolean
+    hasPreviousPage: Boolean
+    startCursor: Optional[String] = None
+    typename__: Optional[Literal['PageInfo']] = Field('PageInfo', alias='__typename')
+
+
+class PermissionSource(BaseModel):
+    """
+    A level of permission and source for a user's access to a repository.
+    """
+
+    organization: Organization
+    permission: DefaultRepositoryPermissionField
+    roleName: Optional[String] = None
+    source: PermissionGranter
+    typename__: Optional[Literal['PermissionSource']] = Field(
+        'PermissionSource', alias='__typename'
+    )
+
+
+class PinIssuePayload(BaseModel):
+    """
+    Autogenerated return type of PinIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['PinIssuePayload']] = Field(
+        'PinIssuePayload', alias='__typename'
+    )
+
+
+class PinnableItemConnection(BaseModel):
+    """
+    The connection type for PinnableItem.
+    """
+
+    edges: Optional[List[Optional[PinnableItemEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PinnableItem]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PinnableItemConnection']] = Field(
+        'PinnableItemConnection', alias='__typename'
+    )
+
+
+class PinnableItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PinnableItem] = None
+    typename__: Optional[Literal['PinnableItemEdge']] = Field(
+        'PinnableItemEdge', alias='__typename'
+    )
+
+
+class PinnedDiscussion(Node, RepositoryNode):
+    """
+    A Pinned Discussion is a discussion pinned to a repository's index page.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    discussion: Discussion
+    gradientStopColors: List[String]
+    id: ID
+    pattern: PinnedDiscussionPattern
+    pinnedBy: Actor
+    preconfiguredGradient: Optional[PinnedDiscussionGradient] = None
+    repository: Repository
+    updatedAt: DateTime
+    typename__: Optional[Literal['PinnedDiscussion']] = Field(
+        'PinnedDiscussion', alias='__typename'
+    )
+
+
+class PinnedDiscussionConnection(BaseModel):
+    """
+    The connection type for PinnedDiscussion.
+    """
+
+    edges: Optional[List[Optional[PinnedDiscussionEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PinnedDiscussion]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PinnedDiscussionConnection']] = Field(
+        'PinnedDiscussionConnection', alias='__typename'
+    )
+
+
+class PinnedDiscussionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PinnedDiscussion] = None
+    typename__: Optional[Literal['PinnedDiscussionEdge']] = Field(
+        'PinnedDiscussionEdge', alias='__typename'
+    )
+
+
+class PinnedEvent(Node):
+    """
+    Represents a 'pinned' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    issue: Issue
+    typename__: Optional[Literal['PinnedEvent']] = Field(
+        'PinnedEvent', alias='__typename'
+    )
+
+
+class PinnedIssue(Node):
+    """
+    A Pinned Issue is a issue pinned to a repository's index page.
+    """
+
+    databaseId: Optional[Int] = None
+    fullDatabaseId: Optional[BigInt] = None
+    id: ID
+    issue: Issue
+    pinnedBy: Actor
+    repository: Repository
+    typename__: Optional[Literal['PinnedIssue']] = Field(
+        'PinnedIssue', alias='__typename'
+    )
+
+
+class PinnedIssueConnection(BaseModel):
+    """
+    The connection type for PinnedIssue.
+    """
+
+    edges: Optional[List[Optional[PinnedIssueEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PinnedIssue]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PinnedIssueConnection']] = Field(
+        'PinnedIssueConnection', alias='__typename'
+    )
+
+
+class PinnedIssueEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PinnedIssue] = None
+    typename__: Optional[Literal['PinnedIssueEdge']] = Field(
+        'PinnedIssueEdge', alias='__typename'
+    )
+
+
+class PrivateRepositoryForkingDisableAuditEntry(
+    AuditEntry,
+    EnterpriseAuditEntryData,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+):
+    """
+    Audit log entry for a private_repository_forking.disable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['PrivateRepositoryForkingDisableAuditEntry']] = Field(
+        'PrivateRepositoryForkingDisableAuditEntry', alias='__typename'
+    )
+
+
+class PrivateRepositoryForkingEnableAuditEntry(
+    AuditEntry,
+    EnterpriseAuditEntryData,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+):
+    """
+    Audit log entry for a private_repository_forking.enable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['PrivateRepositoryForkingEnableAuditEntry']] = Field(
+        'PrivateRepositoryForkingEnableAuditEntry', alias='__typename'
+    )
+
+
+class ProfileItemShowcase(BaseModel):
+    """
+    A curatable list of repositories relating to a repository owner, which defaults
+    to showing the most popular repositories they own.
+    """
+
+    hasPinnedItems: Boolean
+    items: PinnableItemConnection
+    typename__: Optional[Literal['ProfileItemShowcase']] = Field(
+        'ProfileItemShowcase', alias='__typename'
+    )
+
+
+class Project(Closable, Node, Updatable):
+    """
+    Projects manage issues, pull requests and notes within a project owner.
+    """
+
+    body: Optional[String] = None
+    bodyHTML: HTML
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    columns: ProjectColumnConnection
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    number: Int
+    owner: ProjectOwner
+    pendingCards: ProjectCardConnection
+    progress: ProjectProgress
+    resourcePath: URI
+    state: ProjectState
+    updatedAt: DateTime
+    url: URI
+    viewerCanClose: Boolean
+    viewerCanReopen: Boolean
+    viewerCanUpdate: Boolean
+    typename__: Optional[Literal['Project']] = Field('Project', alias='__typename')
+
+
+class ProjectCard(Node):
+    """
+    A card in a project.
+    """
+
+    column: Optional[ProjectColumn] = None
+    content: Optional[ProjectCardItem] = None
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    id: ID
+    isArchived: Boolean
+    note: Optional[String] = None
+    project: Project
+    resourcePath: URI
+    state: Optional[ProjectCardState] = None
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['ProjectCard']] = Field(
+        'ProjectCard', alias='__typename'
+    )
+
+
+class ProjectCardConnection(BaseModel):
+    """
+    The connection type for ProjectCard.
+    """
+
+    edges: Optional[List[Optional[ProjectCardEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectCard]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectCardConnection']] = Field(
+        'ProjectCardConnection', alias='__typename'
+    )
+
+
+class ProjectCardEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectCard] = None
+    typename__: Optional[Literal['ProjectCardEdge']] = Field(
+        'ProjectCardEdge', alias='__typename'
+    )
+
+
+class ProjectColumn(Node):
+    """
+    A column inside a project.
+    """
+
+    cards: ProjectCardConnection
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    project: Project
+    purpose: Optional[ProjectColumnPurpose] = None
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['ProjectColumn']] = Field(
+        'ProjectColumn', alias='__typename'
+    )
+
+
+class ProjectColumnConnection(BaseModel):
+    """
+    The connection type for ProjectColumn.
+    """
+
+    edges: Optional[List[Optional[ProjectColumnEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectColumn]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectColumnConnection']] = Field(
+        'ProjectColumnConnection', alias='__typename'
+    )
+
+
+class ProjectColumnEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectColumn] = None
+    typename__: Optional[Literal['ProjectColumnEdge']] = Field(
+        'ProjectColumnEdge', alias='__typename'
+    )
+
+
+class ProjectConnection(BaseModel):
+    """
+    A list of projects associated with the owner.
+    """
+
+    edges: Optional[List[Optional[ProjectEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Project]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectConnection']] = Field(
+        'ProjectConnection', alias='__typename'
+    )
+
+
+class ProjectEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Project] = None
+    typename__: Optional[Literal['ProjectEdge']] = Field(
+        'ProjectEdge', alias='__typename'
+    )
+
+
+class ProjectProgress(BaseModel):
+    """
+    Project progress stats.
+    """
+
+    doneCount: Int
+    donePercentage: Float
+    enabled: Boolean
+    inProgressCount: Int
+    inProgressPercentage: Float
+    todoCount: Int
+    todoPercentage: Float
+    typename__: Optional[Literal['ProjectProgress']] = Field(
+        'ProjectProgress', alias='__typename'
+    )
+
+
+class ProjectV2(Closable, Node, Updatable):
+    """
+    New projects that manage issues, pull requests and drafts using tables and boards.
+    """
+
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    field: Optional[ProjectV2FieldConfiguration] = None
+    fields_: ProjectV2FieldConfigurationConnection = Field(..., alias='fields')
+    id: ID
+    items: ProjectV2ItemConnection
+    number: Int
+    owner: ProjectV2Owner
+    public: Boolean
+    readme: Optional[String] = None
+    repositories: RepositoryConnection
+    resourcePath: URI
+    shortDescription: Optional[String] = None
+    teams: TeamConnection
+    template: Boolean
+    title: String
+    updatedAt: DateTime
+    url: URI
+    view: Optional[ProjectV2View] = None
+    viewerCanClose: Boolean
+    viewerCanReopen: Boolean
+    viewerCanUpdate: Boolean
+    views: ProjectV2ViewConnection
+    workflow: Optional[ProjectV2Workflow] = None
+    workflows: ProjectV2WorkflowConnection
+    typename__: Optional[Literal['ProjectV2']] = Field('ProjectV2', alias='__typename')
+
+
+class ProjectV2ActorConnection(BaseModel):
+    """
+    The connection type for ProjectV2Actor.
+    """
+
+    edges: Optional[List[Optional[ProjectV2ActorEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2Actor]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2ActorConnection']] = Field(
+        'ProjectV2ActorConnection', alias='__typename'
+    )
+
+
+class ProjectV2ActorEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2Actor] = None
+    typename__: Optional[Literal['ProjectV2ActorEdge']] = Field(
+        'ProjectV2ActorEdge', alias='__typename'
+    )
+
+
+class ProjectV2Connection(BaseModel):
+    """
+    The connection type for ProjectV2.
+    """
+
+    edges: Optional[List[Optional[ProjectV2Edge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2Connection']] = Field(
+        'ProjectV2Connection', alias='__typename'
+    )
+
+
+class ProjectV2Edge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2] = None
+    typename__: Optional[Literal['ProjectV2Edge']] = Field(
+        'ProjectV2Edge', alias='__typename'
+    )
+
+
+class ProjectV2Field(Node, ProjectV2FieldCommon):
+    """
+    A field inside a project.
+    """
+
+    createdAt: DateTime
+    dataType: ProjectV2FieldType
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    project: ProjectV2
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2Field']] = Field(
+        'ProjectV2Field', alias='__typename'
+    )
+
+
+class ProjectV2FieldConfigurationConnection(BaseModel):
+    """
+    The connection type for ProjectV2FieldConfiguration.
+    """
+
+    edges: Optional[List[Optional[ProjectV2FieldConfigurationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[ProjectV2FieldConfiguration]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2FieldConfigurationConnection']] = Field(
+        'ProjectV2FieldConfigurationConnection', alias='__typename'
+    )
+
+
+class ProjectV2FieldConfigurationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2FieldConfiguration] = None
+    typename__: Optional[Literal['ProjectV2FieldConfigurationEdge']] = Field(
+        'ProjectV2FieldConfigurationEdge', alias='__typename'
+    )
+
+
+class ProjectV2FieldConnection(BaseModel):
+    """
+    The connection type for ProjectV2Field.
+    """
+
+    edges: Optional[List[Optional[ProjectV2FieldEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2Field]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2FieldConnection']] = Field(
+        'ProjectV2FieldConnection', alias='__typename'
+    )
+
+
+class ProjectV2FieldEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2Field] = None
+    typename__: Optional[Literal['ProjectV2FieldEdge']] = Field(
+        'ProjectV2FieldEdge', alias='__typename'
+    )
+
+
+class ProjectV2Item(Node):
+    """
+    An item within a Project.
+    """
+
+    content: Optional[ProjectV2ItemContent] = None
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    fieldValueByName: Optional[ProjectV2ItemFieldValue] = None
+    fieldValues: ProjectV2ItemFieldValueConnection
+    id: ID
+    isArchived: Boolean
+    project: ProjectV2
+    type: ProjectV2ItemType
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2Item']] = Field(
+        'ProjectV2Item', alias='__typename'
+    )
+
+
+class ProjectV2ItemConnection(BaseModel):
+    """
+    The connection type for ProjectV2Item.
+    """
+
+    edges: Optional[List[Optional[ProjectV2ItemEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2Item]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2ItemConnection']] = Field(
+        'ProjectV2ItemConnection', alias='__typename'
+    )
+
+
+class ProjectV2ItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['ProjectV2ItemEdge']] = Field(
+        'ProjectV2ItemEdge', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldDateValue(Node, ProjectV2ItemFieldValueCommon):
+    """
+    The value of a date field in a Project item.
+    """
+
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    date: Optional[Date] = None
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldDateValue']] = Field(
+        'ProjectV2ItemFieldDateValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldIterationValue(Node, ProjectV2ItemFieldValueCommon):
+    """
+    The value of an iteration field in a Project item.
+    """
+
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    duration: Int
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    iterationId: String
+    startDate: Date
+    title: String
+    titleHTML: String
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldIterationValue']] = Field(
+        'ProjectV2ItemFieldIterationValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldLabelValue(BaseModel):
+    """
+    The value of the labels field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    labels: Optional[LabelConnection] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldLabelValue']] = Field(
+        'ProjectV2ItemFieldLabelValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldMilestoneValue(BaseModel):
+    """
+    The value of a milestone field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    milestone: Optional[Milestone] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldMilestoneValue']] = Field(
+        'ProjectV2ItemFieldMilestoneValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldNumberValue(Node, ProjectV2ItemFieldValueCommon):
+    """
+    The value of a number field in a Project item.
+    """
+
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    number: Optional[Float] = None
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldNumberValue']] = Field(
+        'ProjectV2ItemFieldNumberValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldPullRequestValue(BaseModel):
+    """
+    The value of a pull request field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    pullRequests: Optional[PullRequestConnection] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldPullRequestValue']] = Field(
+        'ProjectV2ItemFieldPullRequestValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldRepositoryValue(BaseModel):
+    """
+    The value of a repository field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldRepositoryValue']] = Field(
+        'ProjectV2ItemFieldRepositoryValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldReviewerValue(BaseModel):
+    """
+    The value of a reviewers field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    reviewers: Optional[RequestedReviewerConnection] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldReviewerValue']] = Field(
+        'ProjectV2ItemFieldReviewerValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldSingleSelectValue(Node, ProjectV2ItemFieldValueCommon):
+    """
+    The value of a single select field in a Project item.
+    """
+
+    color: ProjectV2SingleSelectFieldOptionColor
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    descriptionHTML: Optional[String] = None
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    name: Optional[String] = None
+    nameHTML: Optional[String] = None
+    optionId: Optional[String] = None
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldSingleSelectValue']] = Field(
+        'ProjectV2ItemFieldSingleSelectValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldTextValue(Node, ProjectV2ItemFieldValueCommon):
+    """
+    The value of a text field in a Project item.
+    """
+
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    databaseId: Optional[Int] = None
+    field: ProjectV2FieldConfiguration
+    id: ID
+    item: ProjectV2Item
+    text: Optional[String] = None
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2ItemFieldTextValue']] = Field(
+        'ProjectV2ItemFieldTextValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldUserValue(BaseModel):
+    """
+    The value of a user field in a Project item.
+    """
+
+    field: ProjectV2FieldConfiguration
+    users: Optional[UserConnection] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldUserValue']] = Field(
+        'ProjectV2ItemFieldUserValue', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldValueConnection(BaseModel):
+    """
+    The connection type for ProjectV2ItemFieldValue.
+    """
+
+    edges: Optional[List[Optional[ProjectV2ItemFieldValueEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[ProjectV2ItemFieldValue]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2ItemFieldValueConnection']] = Field(
+        'ProjectV2ItemFieldValueConnection', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldValueEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2ItemFieldValue] = None
+    typename__: Optional[Literal['ProjectV2ItemFieldValueEdge']] = Field(
+        'ProjectV2ItemFieldValueEdge', alias='__typename'
+    )
+
+
+class ProjectV2IterationField(Node, ProjectV2FieldCommon):
+    """
+    An iteration field inside a project.
+    """
+
+    configuration: ProjectV2IterationFieldConfiguration
+    createdAt: DateTime
+    dataType: ProjectV2FieldType
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    project: ProjectV2
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2IterationField']] = Field(
+        'ProjectV2IterationField', alias='__typename'
+    )
+
+
+class ProjectV2IterationFieldConfiguration(BaseModel):
+    """
+    Iteration field configuration for a project.
+    """
+
+    completedIterations: List[ProjectV2IterationFieldIteration]
+    duration: Int
+    iterations: List[ProjectV2IterationFieldIteration]
+    startDay: Int
+    typename__: Optional[Literal['ProjectV2IterationFieldConfiguration']] = Field(
+        'ProjectV2IterationFieldConfiguration', alias='__typename'
+    )
+
+
+class ProjectV2IterationFieldIteration(BaseModel):
+    """
+    Iteration field iteration settings for a project.
+    """
+
+    duration: Int
+    id: String
+    startDate: Date
+    title: String
+    titleHTML: String
+    typename__: Optional[Literal['ProjectV2IterationFieldIteration']] = Field(
+        'ProjectV2IterationFieldIteration', alias='__typename'
+    )
+
+
+class ProjectV2SingleSelectField(Node, ProjectV2FieldCommon):
+    """
+    A single select field inside a project.
+    """
+
+    createdAt: DateTime
+    dataType: ProjectV2FieldType
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    options: List[ProjectV2SingleSelectFieldOption]
+    project: ProjectV2
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2SingleSelectField']] = Field(
+        'ProjectV2SingleSelectField', alias='__typename'
+    )
+
+
+class ProjectV2SingleSelectFieldOption(BaseModel):
+    """
+    Single select field option for a configuration for a project.
+    """
+
+    color: ProjectV2SingleSelectFieldOptionColor
+    description: String
+    descriptionHTML: String
+    id: String
+    name: String
+    nameHTML: String
+    typename__: Optional[Literal['ProjectV2SingleSelectFieldOption']] = Field(
+        'ProjectV2SingleSelectFieldOption', alias='__typename'
+    )
+
+
+class ProjectV2SortBy(BaseModel):
+    """
+    Represents a sort by field and direction.
+    """
+
+    direction: OrderDirection
+    field: ProjectV2Field
+    typename__: Optional[Literal['ProjectV2SortBy']] = Field(
+        'ProjectV2SortBy', alias='__typename'
+    )
+
+
+class ProjectV2SortByConnection(BaseModel):
+    """
+    The connection type for ProjectV2SortBy.
+    """
+
+    edges: Optional[List[Optional[ProjectV2SortByEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2SortBy]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2SortByConnection']] = Field(
+        'ProjectV2SortByConnection', alias='__typename'
+    )
+
+
+class ProjectV2SortByEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2SortBy] = None
+    typename__: Optional[Literal['ProjectV2SortByEdge']] = Field(
+        'ProjectV2SortByEdge', alias='__typename'
+    )
+
+
+class ProjectV2SortByField(BaseModel):
+    """
+    Represents a sort by field and direction.
+    """
+
+    direction: OrderDirection
+    field: ProjectV2FieldConfiguration
+    typename__: Optional[Literal['ProjectV2SortByField']] = Field(
+        'ProjectV2SortByField', alias='__typename'
+    )
+
+
+class ProjectV2SortByFieldConnection(BaseModel):
+    """
+    The connection type for ProjectV2SortByField.
+    """
+
+    edges: Optional[List[Optional[ProjectV2SortByFieldEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[ProjectV2SortByField]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2SortByFieldConnection']] = Field(
+        'ProjectV2SortByFieldConnection', alias='__typename'
+    )
+
+
+class ProjectV2SortByFieldEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2SortByField] = None
+    typename__: Optional[Literal['ProjectV2SortByFieldEdge']] = Field(
+        'ProjectV2SortByFieldEdge', alias='__typename'
+    )
+
+
+class ProjectV2View(Node):
+    """
+    A view within a ProjectV2.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    fields_: Optional[ProjectV2FieldConfigurationConnection] = Field(
+        None, alias='fields'
+    )
+    filter: Optional[String] = None
+    groupBy: Optional[ProjectV2FieldConnection] = None
+    groupByFields: Optional[ProjectV2FieldConfigurationConnection] = None
+    id: ID
+    layout: ProjectV2ViewLayout
+    name: String
+    number: Int
+    project: ProjectV2
+    sortBy: Optional[ProjectV2SortByConnection] = None
+    sortByFields: Optional[ProjectV2SortByFieldConnection] = None
+    updatedAt: DateTime
+    verticalGroupBy: Optional[ProjectV2FieldConnection] = None
+    verticalGroupByFields: Optional[ProjectV2FieldConfigurationConnection] = None
+    visibleFields: Optional[ProjectV2FieldConnection] = None
+    typename__: Optional[Literal['ProjectV2View']] = Field(
+        'ProjectV2View', alias='__typename'
+    )
+
+
+class ProjectV2ViewConnection(BaseModel):
+    """
+    The connection type for ProjectV2View.
+    """
+
+    edges: Optional[List[Optional[ProjectV2ViewEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2View]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2ViewConnection']] = Field(
+        'ProjectV2ViewConnection', alias='__typename'
+    )
+
+
+class ProjectV2ViewEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2View] = None
+    typename__: Optional[Literal['ProjectV2ViewEdge']] = Field(
+        'ProjectV2ViewEdge', alias='__typename'
+    )
+
+
+class ProjectV2Workflow(Node):
+    """
+    A workflow inside a project.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    enabled: Boolean
+    id: ID
+    name: String
+    number: Int
+    project: ProjectV2
+    updatedAt: DateTime
+    typename__: Optional[Literal['ProjectV2Workflow']] = Field(
+        'ProjectV2Workflow', alias='__typename'
+    )
+
+
+class ProjectV2WorkflowConnection(BaseModel):
+    """
+    The connection type for ProjectV2Workflow.
+    """
+
+    edges: Optional[List[Optional[ProjectV2WorkflowEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ProjectV2Workflow]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ProjectV2WorkflowConnection']] = Field(
+        'ProjectV2WorkflowConnection', alias='__typename'
+    )
+
+
+class ProjectV2WorkflowEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ProjectV2Workflow] = None
+    typename__: Optional[Literal['ProjectV2WorkflowEdge']] = Field(
+        'ProjectV2WorkflowEdge', alias='__typename'
+    )
+
+
+class PublicKey(Node):
+    """
+    A user's public key.
+    """
+
+    accessedAt: Optional[DateTime] = None
+    createdAt: Optional[DateTime] = None
+    fingerprint: String
+    id: ID
+    isReadOnly: Optional[Boolean] = None
+    key: String
+    updatedAt: Optional[DateTime] = None
+    typename__: Optional[Literal['PublicKey']] = Field('PublicKey', alias='__typename')
+
+
+class PublicKeyConnection(BaseModel):
+    """
+    The connection type for PublicKey.
+    """
+
+    edges: Optional[List[Optional[PublicKeyEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PublicKey]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PublicKeyConnection']] = Field(
+        'PublicKeyConnection', alias='__typename'
+    )
+
+
+class PublicKeyEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PublicKey] = None
+    typename__: Optional[Literal['PublicKeyEdge']] = Field(
+        'PublicKeyEdge', alias='__typename'
+    )
+
+
+class PublishSponsorsTierPayload(BaseModel):
+    """
+    Autogenerated return type of PublishSponsorsTier
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsTier: Optional[SponsorsTier] = None
+    typename__: Optional[Literal['PublishSponsorsTierPayload']] = Field(
+        'PublishSponsorsTierPayload', alias='__typename'
+    )
+
+
+class PullRequest(
+    Assignable,
+    Closable,
+    Comment,
+    Labelable,
+    Lockable,
+    Node,
+    ProjectV2Owner,
+    Reactable,
+    RepositoryNode,
+    Subscribable,
+    UniformResourceLocatable,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    A repository pull request.
+    """
+
+    activeLockReason: Optional[LockReason] = None
+    additions: Int
+    assignees: UserConnection
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    autoMergeRequest: Optional[AutoMergeRequest] = None
+    baseRef: Optional[Ref] = None
+    baseRefName: String
+    baseRefOid: GitObjectID
+    baseRepository: Optional[Repository] = None
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    canBeRebased: Boolean
+    changedFiles: Int
+    checksResourcePath: URI
+    checksUrl: URI
+    closed: Boolean
+    closedAt: Optional[DateTime] = None
+    closingIssuesReferences: Optional[IssueConnection] = None
+    comments: IssueCommentConnection
+    commits: PullRequestCommitConnection
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    deletions: Int
+    editor: Optional[Actor] = None
+    files: Optional[PullRequestChangedFileConnection] = None
+    headRef: Optional[Ref] = None
+    headRefName: String
+    headRefOid: GitObjectID
+    headRepository: Optional[Repository] = None
+    headRepositoryOwner: Optional[RepositoryOwner] = None
+    hovercard: Hovercard
+    id: ID
+    includesCreatedEdit: Boolean
+    isCrossRepository: Boolean
+    isDraft: Boolean
+    isReadByViewer: Optional[Boolean] = None
+    labels: Optional[LabelConnection] = None
+    lastEditedAt: Optional[DateTime] = None
+    latestOpinionatedReviews: Optional[PullRequestReviewConnection] = None
+    latestReviews: Optional[PullRequestReviewConnection] = None
+    locked: Boolean
+    maintainerCanModify: Boolean
+    mergeCommit: Optional[Commit] = None
+    mergeQueueEntry: Optional[MergeQueueEntry] = None
+    mergeStateStatus: MergeStateStatus
+    mergeable: MergeableState
+    merged: Boolean
+    mergedAt: Optional[DateTime] = None
+    mergedBy: Optional[Actor] = None
+    milestone: Optional[Milestone] = None
+    number: Int
+    participants: UserConnection
+    permalink: URI
+    potentialMergeCommit: Optional[Commit] = None
+    projectCards: ProjectCardConnection
+    projectItems: ProjectV2ItemConnection
+    projectV2: Optional[ProjectV2] = None
+    projectsV2: ProjectV2Connection
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    revertResourcePath: URI
+    revertUrl: URI
+    reviewDecision: Optional[PullRequestReviewDecision] = None
+    reviewRequests: Optional[ReviewRequestConnection] = None
+    reviewThreads: PullRequestReviewThreadConnection
+    reviews: Optional[PullRequestReviewConnection] = None
+    state: PullRequestState
+    suggestedReviewers: List[Optional[SuggestedReviewer]]
+    timeline: PullRequestTimelineConnection
+    timelineItems: PullRequestTimelineItemsConnection
+    title: String
+    titleHTML: HTML
+    totalCommentsCount: Optional[Int] = None
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanApplySuggestion: Boolean
+    viewerCanClose: Boolean
+    viewerCanDeleteHeadRef: Boolean
+    viewerCanDisableAutoMerge: Boolean
+    viewerCanEditFiles: Boolean
+    viewerCanEnableAutoMerge: Boolean
+    viewerCanMergeAsAdmin: Boolean
+    viewerCanReact: Boolean
+    viewerCanReopen: Boolean
+    viewerCanSubscribe: Boolean
+    viewerCanUpdate: Boolean
+    viewerCanUpdateBranch: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    viewerLatestReview: Optional[PullRequestReview] = None
+    viewerLatestReviewRequest: Optional[ReviewRequest] = None
+    viewerMergeBodyText: String
+    viewerMergeHeadlineText: String
+    viewerSubscription: Optional[SubscriptionState] = None
+    typename__: Optional[Literal['PullRequest']] = Field(
+        'PullRequest', alias='__typename'
+    )
+
+
+class PullRequestChangedFile(BaseModel):
+    """
+    A file changed in a pull request.
+    """
+
+    additions: Int
+    changeType: PatchStatus
+    deletions: Int
+    path: String
+    viewerViewedState: FileViewedState
+    typename__: Optional[Literal['PullRequestChangedFile']] = Field(
+        'PullRequestChangedFile', alias='__typename'
+    )
+
+
+class PullRequestChangedFileConnection(BaseModel):
+    """
+    The connection type for PullRequestChangedFile.
+    """
+
+    edges: Optional[List[Optional[PullRequestChangedFileEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[PullRequestChangedFile]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestChangedFileConnection']] = Field(
+        'PullRequestChangedFileConnection', alias='__typename'
+    )
+
+
+class PullRequestChangedFileEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestChangedFile] = None
+    typename__: Optional[Literal['PullRequestChangedFileEdge']] = Field(
+        'PullRequestChangedFileEdge', alias='__typename'
+    )
+
+
+class PullRequestCommit(Node, UniformResourceLocatable):
+    """
+    Represents a Git commit part of a pull request.
+    """
+
+    commit: Commit
+    id: ID
+    pullRequest: PullRequest
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['PullRequestCommit']] = Field(
+        'PullRequestCommit', alias='__typename'
+    )
+
+
+class PullRequestCommitCommentThread(Node, RepositoryNode):
+    """
+    Represents a commit comment thread part of a pull request.
+    """
+
+    comments: CommitCommentConnection
+    commit: Commit
+    id: ID
+    path: Optional[String] = None
+    position: Optional[Int] = None
+    pullRequest: PullRequest
+    repository: Repository
+    typename__: Optional[Literal['PullRequestCommitCommentThread']] = Field(
+        'PullRequestCommitCommentThread', alias='__typename'
+    )
+
+
+class PullRequestCommitConnection(BaseModel):
+    """
+    The connection type for PullRequestCommit.
+    """
+
+    edges: Optional[List[Optional[PullRequestCommitEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PullRequestCommit]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestCommitConnection']] = Field(
+        'PullRequestCommitConnection', alias='__typename'
+    )
+
+
+class PullRequestCommitEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestCommit] = None
+    typename__: Optional[Literal['PullRequestCommitEdge']] = Field(
+        'PullRequestCommitEdge', alias='__typename'
+    )
+
+
+class PullRequestConnection(BaseModel):
+    """
+    The connection type for PullRequest.
+    """
+
+    edges: Optional[List[Optional[PullRequestEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PullRequest]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestConnection']] = Field(
+        'PullRequestConnection', alias='__typename'
+    )
+
+
+class PullRequestContributionsByRepository(BaseModel):
+    """
+    This aggregates pull requests opened by a user within one repository.
+    """
+
+    contributions: CreatedPullRequestContributionConnection
+    repository: Repository
+    typename__: Optional[Literal['PullRequestContributionsByRepository']] = Field(
+        'PullRequestContributionsByRepository', alias='__typename'
+    )
+
+
+class PullRequestEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequest] = None
+    typename__: Optional[Literal['PullRequestEdge']] = Field(
+        'PullRequestEdge', alias='__typename'
+    )
+
+
+class PullRequestParameters(BaseModel):
+    """
+    Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+    """
+
+    dismissStaleReviewsOnPush: Boolean
+    requireCodeOwnerReview: Boolean
+    requireLastPushApproval: Boolean
+    requiredApprovingReviewCount: Int
+    requiredReviewThreadResolution: Boolean
+    typename__: Optional[Literal['PullRequestParameters']] = Field(
+        'PullRequestParameters', alias='__typename'
+    )
+
+
+class PullRequestReview(
+    Comment,
+    Deletable,
+    Minimizable,
+    Node,
+    Reactable,
+    RepositoryNode,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    A review object for a given pull request.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    authorCanPushToRepository: Boolean
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    comments: PullRequestReviewCommentConnection
+    commit: Optional[Commit] = None
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isMinimized: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    minimizedReason: Optional[String] = None
+    onBehalfOf: TeamConnection
+    publishedAt: Optional[DateTime] = None
+    pullRequest: PullRequest
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    repository: Repository
+    resourcePath: URI
+    state: PullRequestReviewState
+    submittedAt: Optional[DateTime] = None
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanReact: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['PullRequestReview']] = Field(
+        'PullRequestReview', alias='__typename'
+    )
+
+
+class PullRequestReviewComment(
+    Comment,
+    Deletable,
+    Minimizable,
+    Node,
+    Reactable,
+    RepositoryNode,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    A review comment associated with a given repository pull request.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    commit: Optional[Commit] = None
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    diffHunk: String
+    draftedAt: DateTime
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isMinimized: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    line: Optional[Int] = None
+    minimizedReason: Optional[String] = None
+    originalCommit: Optional[Commit] = None
+    originalLine: Optional[Int] = None
+    originalPosition: Int
+    originalStartLine: Optional[Int] = None
+    outdated: Boolean
+    path: String
+    position: Optional[Int] = None
+    publishedAt: Optional[DateTime] = None
+    pullRequest: PullRequest
+    pullRequestReview: Optional[PullRequestReview] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    replyTo: Optional[PullRequestReviewComment] = None
+    repository: Repository
+    resourcePath: URI
+    startLine: Optional[Int] = None
+    state: PullRequestReviewCommentState
+    subjectType: PullRequestReviewThreadSubjectType
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanMinimize: Boolean
+    viewerCanReact: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['PullRequestReviewComment']] = Field(
+        'PullRequestReviewComment', alias='__typename'
+    )
+
+
+class PullRequestReviewCommentConnection(BaseModel):
+    """
+    The connection type for PullRequestReviewComment.
+    """
+
+    edges: Optional[List[Optional[PullRequestReviewCommentEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[PullRequestReviewComment]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestReviewCommentConnection']] = Field(
+        'PullRequestReviewCommentConnection', alias='__typename'
+    )
+
+
+class PullRequestReviewCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestReviewComment] = None
+    typename__: Optional[Literal['PullRequestReviewCommentEdge']] = Field(
+        'PullRequestReviewCommentEdge', alias='__typename'
+    )
+
+
+class PullRequestReviewConnection(BaseModel):
+    """
+    The connection type for PullRequestReview.
+    """
+
+    edges: Optional[List[Optional[PullRequestReviewEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PullRequestReview]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestReviewConnection']] = Field(
+        'PullRequestReviewConnection', alias='__typename'
+    )
+
+
+class PullRequestReviewContributionsByRepository(BaseModel):
+    """
+    This aggregates pull request reviews made by a user within one repository.
+    """
+
+    contributions: CreatedPullRequestReviewContributionConnection
+    repository: Repository
+    typename__: Optional[Literal['PullRequestReviewContributionsByRepository']] = Field(
+        'PullRequestReviewContributionsByRepository', alias='__typename'
+    )
+
+
+class PullRequestReviewEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestReview] = None
+    typename__: Optional[Literal['PullRequestReviewEdge']] = Field(
+        'PullRequestReviewEdge', alias='__typename'
+    )
+
+
+class PullRequestReviewThread(Node):
+    """
+    A threaded list of comments for a given pull request.
+    """
+
+    comments: PullRequestReviewCommentConnection
+    diffSide: DiffSide
+    id: ID
+    isCollapsed: Boolean
+    isOutdated: Boolean
+    isResolved: Boolean
+    line: Optional[Int] = None
+    originalLine: Optional[Int] = None
+    originalStartLine: Optional[Int] = None
+    path: String
+    pullRequest: PullRequest
+    repository: Repository
+    resolvedBy: Optional[User] = None
+    startDiffSide: Optional[DiffSide] = None
+    startLine: Optional[Int] = None
+    subjectType: PullRequestReviewThreadSubjectType
+    viewerCanReply: Boolean
+    viewerCanResolve: Boolean
+    viewerCanUnresolve: Boolean
+    typename__: Optional[Literal['PullRequestReviewThread']] = Field(
+        'PullRequestReviewThread', alias='__typename'
+    )
+
+
+class PullRequestReviewThreadConnection(BaseModel):
+    """
+    Review comment threads for a pull request review.
+    """
+
+    edges: Optional[List[Optional[PullRequestReviewThreadEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[PullRequestReviewThread]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestReviewThreadConnection']] = Field(
+        'PullRequestReviewThreadConnection', alias='__typename'
+    )
+
+
+class PullRequestReviewThreadEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestReviewThread] = None
+    typename__: Optional[Literal['PullRequestReviewThreadEdge']] = Field(
+        'PullRequestReviewThreadEdge', alias='__typename'
+    )
+
+
+class PullRequestRevisionMarker(BaseModel):
+    """
+    Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+    """
+
+    createdAt: DateTime
+    lastSeenCommit: Commit
+    pullRequest: PullRequest
+    typename__: Optional[Literal['PullRequestRevisionMarker']] = Field(
+        'PullRequestRevisionMarker', alias='__typename'
+    )
+
+
+class PullRequestTemplate(BaseModel):
+    """
+    A repository pull request template.
+    """
+
+    body: Optional[String] = None
+    filename: Optional[String] = None
+    repository: Repository
+    typename__: Optional[Literal['PullRequestTemplate']] = Field(
+        'PullRequestTemplate', alias='__typename'
+    )
+
+
+class PullRequestThread(Node):
+    """
+    A threaded list of comments for a given pull request.
+    """
+
+    comments: PullRequestReviewCommentConnection
+    diffSide: DiffSide
+    id: ID
+    isCollapsed: Boolean
+    isOutdated: Boolean
+    isResolved: Boolean
+    line: Optional[Int] = None
+    path: String
+    pullRequest: PullRequest
+    repository: Repository
+    resolvedBy: Optional[User] = None
+    startDiffSide: Optional[DiffSide] = None
+    startLine: Optional[Int] = None
+    subjectType: PullRequestReviewThreadSubjectType
+    viewerCanReply: Boolean
+    viewerCanResolve: Boolean
+    viewerCanUnresolve: Boolean
+    typename__: Optional[Literal['PullRequestThread']] = Field(
+        'PullRequestThread', alias='__typename'
+    )
+
+
+class PullRequestTimelineConnection(BaseModel):
+    """
+    The connection type for PullRequestTimelineItem.
+    """
+
+    edges: Optional[List[Optional[PullRequestTimelineItemEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[PullRequestTimelineItem]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PullRequestTimelineConnection']] = Field(
+        'PullRequestTimelineConnection', alias='__typename'
+    )
+
+
+class PullRequestTimelineItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestTimelineItem] = None
+    typename__: Optional[Literal['PullRequestTimelineItemEdge']] = Field(
+        'PullRequestTimelineItemEdge', alias='__typename'
+    )
+
+
+class PullRequestTimelineItemsConnection(BaseModel):
+    """
+    The connection type for PullRequestTimelineItems.
+    """
+
+    edges: Optional[List[Optional[PullRequestTimelineItemsEdge]]] = Field(
+        default_factory=list
+    )
+    filteredCount: Int
+    nodes: Optional[List[Optional[PullRequestTimelineItems]]] = Field(
+        default_factory=list
+    )
+    pageCount: Int
+    pageInfo: PageInfo
+    totalCount: Int
+    updatedAt: DateTime
+    typename__: Optional[Literal['PullRequestTimelineItemsConnection']] = Field(
+        'PullRequestTimelineItemsConnection', alias='__typename'
+    )
+
+
+class PullRequestTimelineItemsEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PullRequestTimelineItems] = None
+    typename__: Optional[Literal['PullRequestTimelineItemsEdge']] = Field(
+        'PullRequestTimelineItemsEdge', alias='__typename'
+    )
+
+
+class Push(Node):
+    """
+    A Git push.
+    """
+
+    id: ID
+    nextSha: Optional[GitObjectID] = None
+    permalink: URI
+    previousSha: Optional[GitObjectID] = None
+    pusher: Actor
+    repository: Repository
+    typename__: Optional[Literal['Push']] = Field('Push', alias='__typename')
+
+
+class PushAllowance(Node):
+    """
+    A team, user, or app who has the ability to push to a protected branch.
+    """
+
+    actor: Optional[PushAllowanceActor] = None
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    id: ID
+    typename__: Optional[Literal['PushAllowance']] = Field(
+        'PushAllowance', alias='__typename'
+    )
+
+
+class PushAllowanceConnection(BaseModel):
+    """
+    The connection type for PushAllowance.
+    """
+
+    edges: Optional[List[Optional[PushAllowanceEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[PushAllowance]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['PushAllowanceConnection']] = Field(
+        'PushAllowanceConnection', alias='__typename'
+    )
+
+
+class PushAllowanceEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[PushAllowance] = None
+    typename__: Optional[Literal['PushAllowanceEdge']] = Field(
+        'PushAllowanceEdge', alias='__typename'
+    )
+
+
+class RateLimit(BaseModel):
+    """
+    Represents the client's rate limit.
+    """
+
+    cost: Int
+    limit: Int
+    nodeCount: Int
+    remaining: Int
+    resetAt: DateTime
+    used: Int
+    typename__: Optional[Literal['RateLimit']] = Field('RateLimit', alias='__typename')
+
+
+class ReactingUserConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[ReactingUserEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReactingUserConnection']] = Field(
+        'ReactingUserConnection', alias='__typename'
+    )
+
+
+class ReactingUserEdge(BaseModel):
+    """
+    Represents a user that's made a reaction.
+    """
+
+    cursor: String
+    node: User
+    reactedAt: DateTime
+    typename__: Optional[Literal['ReactingUserEdge']] = Field(
+        'ReactingUserEdge', alias='__typename'
+    )
+
+
+class Reaction(Node):
+    """
+    An emoji reaction to a particular piece of content.
+    """
+
+    content: ReactionContent
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    reactable: Reactable
+    user: Optional[User] = None
+    typename__: Optional[Literal['Reaction']] = Field('Reaction', alias='__typename')
+
+
+class ReactionConnection(BaseModel):
+    """
+    A list of reactions that have been left on the subject.
+    """
+
+    edges: Optional[List[Optional[ReactionEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Reaction]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    viewerHasReacted: Boolean
+    typename__: Optional[Literal['ReactionConnection']] = Field(
+        'ReactionConnection', alias='__typename'
+    )
+
+
+class ReactionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Reaction] = None
+    typename__: Optional[Literal['ReactionEdge']] = Field(
+        'ReactionEdge', alias='__typename'
+    )
+
+
+class ReactionGroup(BaseModel):
+    """
+    A group of emoji reactions to a particular piece of content.
+    """
+
+    content: ReactionContent
+    createdAt: Optional[DateTime] = None
+    reactors: ReactorConnection
+    subject: Reactable
+    users: ReactingUserConnection
+    viewerHasReacted: Boolean
+    typename__: Optional[Literal['ReactionGroup']] = Field(
+        'ReactionGroup', alias='__typename'
+    )
+
+
+class ReactorConnection(BaseModel):
+    """
+    The connection type for Reactor.
+    """
+
+    edges: Optional[List[Optional[ReactorEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Reactor]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReactorConnection']] = Field(
+        'ReactorConnection', alias='__typename'
+    )
+
+
+class ReactorEdge(BaseModel):
+    """
+    Represents an author of a reaction.
+    """
+
+    cursor: String
+    node: Reactor
+    reactedAt: DateTime
+    typename__: Optional[Literal['ReactorEdge']] = Field(
+        'ReactorEdge', alias='__typename'
+    )
+
+
+class ReadyForReviewEvent(Node, UniformResourceLocatable):
+    """
+    Represents a 'ready_for_review' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    resourcePath: URI
+    url: URI
+    typename__: Optional[Literal['ReadyForReviewEvent']] = Field(
+        'ReadyForReviewEvent', alias='__typename'
+    )
+
+
+class Ref(Node):
+    """
+    Represents a Git reference.
+    """
+
+    associatedPullRequests: PullRequestConnection
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    compare: Optional[Comparison] = None
+    id: ID
+    name: String
+    prefix: String
+    refUpdateRule: Optional[RefUpdateRule] = None
+    repository: Repository
+    target: Optional[GitObject] = None
+    typename__: Optional[Literal['Ref']] = Field('Ref', alias='__typename')
+
+
+class RefConnection(BaseModel):
+    """
+    The connection type for Ref.
+    """
+
+    edges: Optional[List[Optional[RefEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Ref]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RefConnection']] = Field(
+        'RefConnection', alias='__typename'
+    )
+
+
+class RefEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Ref] = None
+    typename__: Optional[Literal['RefEdge']] = Field('RefEdge', alias='__typename')
+
+
+class RefNameConditionTarget(BaseModel):
+    """
+    Parameters to be used for the ref_name condition
+    """
+
+    exclude: List[String]
+    include: List[String]
+    typename__: Optional[Literal['RefNameConditionTarget']] = Field(
+        'RefNameConditionTarget', alias='__typename'
+    )
+
+
+class RefUpdateRule(BaseModel):
+    """
+    A ref update rules for a viewer.
+    """
+
+    allowsDeletions: Boolean
+    allowsForcePushes: Boolean
+    blocksCreations: Boolean
+    pattern: String
+    requiredApprovingReviewCount: Optional[Int] = None
+    requiredStatusCheckContexts: Optional[List[Optional[String]]] = Field(
+        default_factory=list
+    )
+    requiresCodeOwnerReviews: Boolean
+    requiresConversationResolution: Boolean
+    requiresLinearHistory: Boolean
+    requiresSignatures: Boolean
+    viewerAllowedToDismissReviews: Boolean
+    viewerCanPush: Boolean
+    typename__: Optional[Literal['RefUpdateRule']] = Field(
+        'RefUpdateRule', alias='__typename'
+    )
+
+
+class ReferencedEvent(Node):
+    """
+    Represents a 'referenced' event on a given `ReferencedSubject`.
+    """
+
+    actor: Optional[Actor] = None
+    commit: Optional[Commit] = None
+    commitRepository: Repository
+    createdAt: DateTime
+    id: ID
+    isCrossRepository: Boolean
+    isDirectReference: Boolean
+    subject: ReferencedSubject
+    typename__: Optional[Literal['ReferencedEvent']] = Field(
+        'ReferencedEvent', alias='__typename'
+    )
+
+
+class RegenerateEnterpriseIdentityProviderRecoveryCodesPayload(BaseModel):
+    """
+    Autogenerated return type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+    """
+
+    clientMutationId: Optional[String] = None
+    identityProvider: Optional[EnterpriseIdentityProvider] = None
+    typename__: Optional[
+        Literal['RegenerateEnterpriseIdentityProviderRecoveryCodesPayload']
+    ] = Field(
+        'RegenerateEnterpriseIdentityProviderRecoveryCodesPayload', alias='__typename'
+    )
+
+
+class RegenerateVerifiableDomainTokenPayload(BaseModel):
+    """
+    Autogenerated return type of RegenerateVerifiableDomainToken
+    """
+
+    clientMutationId: Optional[String] = None
+    verificationToken: Optional[String] = None
+    typename__: Optional[Literal['RegenerateVerifiableDomainTokenPayload']] = Field(
+        'RegenerateVerifiableDomainTokenPayload', alias='__typename'
+    )
+
+
+class RejectDeploymentsPayload(BaseModel):
+    """
+    Autogenerated return type of RejectDeployments
+    """
+
+    clientMutationId: Optional[String] = None
+    deployments: Optional[List[Deployment]] = Field(default_factory=list)
+    typename__: Optional[Literal['RejectDeploymentsPayload']] = Field(
+        'RejectDeploymentsPayload', alias='__typename'
+    )
+
+
+class Release(Node, Reactable, UniformResourceLocatable):
+    """
+    A release contains the content for a release.
+    """
+
+    author: Optional[User] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    descriptionHTML: Optional[HTML] = None
+    id: ID
+    isDraft: Boolean
+    isLatest: Boolean
+    isPrerelease: Boolean
+    mentions: Optional[UserConnection] = None
+    name: Optional[String] = None
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    releaseAssets: ReleaseAssetConnection
+    repository: Repository
+    resourcePath: URI
+    shortDescriptionHTML: Optional[HTML] = None
+    tag: Optional[Ref] = None
+    tagCommit: Optional[Commit] = None
+    tagName: String
+    updatedAt: DateTime
+    url: URI
+    viewerCanReact: Boolean
+    typename__: Optional[Literal['Release']] = Field('Release', alias='__typename')
+
+
+class ReleaseAsset(Node):
+    """
+    A release asset contains the content for a release asset.
+    """
+
+    contentType: String
+    createdAt: DateTime
+    downloadCount: Int
+    downloadUrl: URI
+    id: ID
+    name: String
+    release: Optional[Release] = None
+    size: Int
+    updatedAt: DateTime
+    uploadedBy: User
+    url: URI
+    typename__: Optional[Literal['ReleaseAsset']] = Field(
+        'ReleaseAsset', alias='__typename'
+    )
+
+
+class ReleaseAssetConnection(BaseModel):
+    """
+    The connection type for ReleaseAsset.
+    """
+
+    edges: Optional[List[Optional[ReleaseAssetEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ReleaseAsset]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReleaseAssetConnection']] = Field(
+        'ReleaseAssetConnection', alias='__typename'
+    )
+
+
+class ReleaseAssetEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ReleaseAsset] = None
+    typename__: Optional[Literal['ReleaseAssetEdge']] = Field(
+        'ReleaseAssetEdge', alias='__typename'
+    )
+
+
+class ReleaseConnection(BaseModel):
+    """
+    The connection type for Release.
+    """
+
+    edges: Optional[List[Optional[ReleaseEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Release]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReleaseConnection']] = Field(
+        'ReleaseConnection', alias='__typename'
+    )
+
+
+class ReleaseEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Release] = None
+    typename__: Optional[Literal['ReleaseEdge']] = Field(
+        'ReleaseEdge', alias='__typename'
+    )
+
+
+class RemoveAssigneesFromAssignablePayload(BaseModel):
+    """
+    Autogenerated return type of RemoveAssigneesFromAssignable
+    """
+
+    assignable: Optional[Assignable] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['RemoveAssigneesFromAssignablePayload']] = Field(
+        'RemoveAssigneesFromAssignablePayload', alias='__typename'
+    )
+
+
+class RemoveEnterpriseAdminPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveEnterpriseAdmin
+    """
+
+    admin: Optional[User] = None
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    viewer: Optional[User] = None
+    typename__: Optional[Literal['RemoveEnterpriseAdminPayload']] = Field(
+        'RemoveEnterpriseAdminPayload', alias='__typename'
+    )
+
+
+class RemoveEnterpriseIdentityProviderPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveEnterpriseIdentityProvider
+    """
+
+    clientMutationId: Optional[String] = None
+    identityProvider: Optional[EnterpriseIdentityProvider] = None
+    typename__: Optional[Literal['RemoveEnterpriseIdentityProviderPayload']] = Field(
+        'RemoveEnterpriseIdentityProviderPayload', alias='__typename'
+    )
+
+
+class RemoveEnterpriseMemberPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveEnterpriseMember
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    user: Optional[User] = None
+    viewer: Optional[User] = None
+    typename__: Optional[Literal['RemoveEnterpriseMemberPayload']] = Field(
+        'RemoveEnterpriseMemberPayload', alias='__typename'
+    )
+
+
+class RemoveEnterpriseOrganizationPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveEnterpriseOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    organization: Optional[Organization] = None
+    viewer: Optional[User] = None
+    typename__: Optional[Literal['RemoveEnterpriseOrganizationPayload']] = Field(
+        'RemoveEnterpriseOrganizationPayload', alias='__typename'
+    )
+
+
+class RemoveEnterpriseSupportEntitlementPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveEnterpriseSupportEntitlement
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    typename__: Optional[Literal['RemoveEnterpriseSupportEntitlementPayload']] = Field(
+        'RemoveEnterpriseSupportEntitlementPayload', alias='__typename'
+    )
+
+
+class RemoveLabelsFromLabelablePayload(BaseModel):
+    """
+    Autogenerated return type of RemoveLabelsFromLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelable: Optional[Labelable] = None
+    typename__: Optional[Literal['RemoveLabelsFromLabelablePayload']] = Field(
+        'RemoveLabelsFromLabelablePayload', alias='__typename'
+    )
+
+
+class RemoveOutsideCollaboratorPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveOutsideCollaborator
+    """
+
+    clientMutationId: Optional[String] = None
+    removedUser: Optional[User] = None
+    typename__: Optional[Literal['RemoveOutsideCollaboratorPayload']] = Field(
+        'RemoveOutsideCollaboratorPayload', alias='__typename'
+    )
+
+
+class RemoveReactionPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveReaction
+    """
+
+    clientMutationId: Optional[String] = None
+    reaction: Optional[Reaction] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    subject: Optional[Reactable] = None
+    typename__: Optional[Literal['RemoveReactionPayload']] = Field(
+        'RemoveReactionPayload', alias='__typename'
+    )
+
+
+class RemoveStarPayload(BaseModel):
+    """
+    Autogenerated return type of RemoveStar
+    """
+
+    clientMutationId: Optional[String] = None
+    starrable: Optional[Starrable] = None
+    typename__: Optional[Literal['RemoveStarPayload']] = Field(
+        'RemoveStarPayload', alias='__typename'
+    )
+
+
+class RemoveUpvotePayload(BaseModel):
+    """
+    Autogenerated return type of RemoveUpvote
+    """
+
+    clientMutationId: Optional[String] = None
+    subject: Optional[Votable] = None
+    typename__: Optional[Literal['RemoveUpvotePayload']] = Field(
+        'RemoveUpvotePayload', alias='__typename'
+    )
+
+
+class RemovedFromMergeQueueEvent(Node):
+    """
+    Represents a 'removed_from_merge_queue' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    beforeCommit: Optional[Commit] = None
+    createdAt: DateTime
+    enqueuer: Optional[User] = None
+    id: ID
+    mergeQueue: Optional[MergeQueue] = None
+    pullRequest: Optional[PullRequest] = None
+    reason: Optional[String] = None
+    typename__: Optional[Literal['RemovedFromMergeQueueEvent']] = Field(
+        'RemovedFromMergeQueueEvent', alias='__typename'
+    )
+
+
+class RemovedFromProjectEvent(Node):
+    """
+    Represents a 'removed_from_project' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    project: Optional[Project] = None
+    projectColumnName: String
+    typename__: Optional[Literal['RemovedFromProjectEvent']] = Field(
+        'RemovedFromProjectEvent', alias='__typename'
+    )
+
+
+class RenamedTitleEvent(Node):
+    """
+    Represents a 'renamed' event on a given issue or pull request
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    currentTitle: String
+    id: ID
+    previousTitle: String
+    subject: RenamedTitleSubject
+    typename__: Optional[Literal['RenamedTitleEvent']] = Field(
+        'RenamedTitleEvent', alias='__typename'
+    )
+
+
+class ReopenDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of ReopenDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['ReopenDiscussionPayload']] = Field(
+        'ReopenDiscussionPayload', alias='__typename'
+    )
+
+
+class ReopenIssuePayload(BaseModel):
+    """
+    Autogenerated return type of ReopenIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['ReopenIssuePayload']] = Field(
+        'ReopenIssuePayload', alias='__typename'
+    )
+
+
+class ReopenPullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of ReopenPullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['ReopenPullRequestPayload']] = Field(
+        'ReopenPullRequestPayload', alias='__typename'
+    )
+
+
+class ReopenedEvent(Node):
+    """
+    Represents a 'reopened' event on any `Closable`.
+    """
+
+    actor: Optional[Actor] = None
+    closable: Closable
+    createdAt: DateTime
+    id: ID
+    stateReason: Optional[IssueStateReason] = None
+    typename__: Optional[Literal['ReopenedEvent']] = Field(
+        'ReopenedEvent', alias='__typename'
+    )
+
+
+class RepoAccessAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.access event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoAccessAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoAccessAuditEntry']] = Field(
+        'RepoAccessAuditEntry', alias='__typename'
+    )
+
+
+class RepoAddMemberAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.add_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoAddMemberAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoAddMemberAuditEntry']] = Field(
+        'RepoAddMemberAuditEntry', alias='__typename'
+    )
+
+
+class RepoAddTopicAuditEntry(
+    AuditEntry,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+    TopicAuditEntryData,
+):
+    """
+    Audit log entry for a repo.add_topic event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    topic: Optional[Topic] = None
+    topicName: Optional[String] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepoAddTopicAuditEntry']] = Field(
+        'RepoAddTopicAuditEntry', alias='__typename'
+    )
+
+
+class RepoArchivedAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.archived event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoArchivedAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoArchivedAuditEntry']] = Field(
+        'RepoArchivedAuditEntry', alias='__typename'
+    )
+
+
+class RepoChangeMergeSettingAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.change_merge_setting event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isEnabled: Optional[Boolean] = None
+    mergeType: Optional[RepoChangeMergeSettingAuditEntryMergeType] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepoChangeMergeSettingAuditEntry']] = Field(
+        'RepoChangeMergeSettingAuditEntry', alias='__typename'
+    )
+
+
+class RepoConfigDisableAnonymousGitAccessAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.disable_anonymous_git_access event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigDisableAnonymousGitAccessAuditEntry']
+    ] = Field('RepoConfigDisableAnonymousGitAccessAuditEntry', alias='__typename')
+
+
+class RepoConfigDisableCollaboratorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.disable_collaborators_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigDisableCollaboratorsOnlyAuditEntry']
+    ] = Field('RepoConfigDisableCollaboratorsOnlyAuditEntry', alias='__typename')
+
+
+class RepoConfigDisableContributorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.disable_contributors_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigDisableContributorsOnlyAuditEntry']
+    ] = Field('RepoConfigDisableContributorsOnlyAuditEntry', alias='__typename')
+
+
+class RepoConfigDisableSockpuppetDisallowedAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.disable_sockpuppet_disallowed event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigDisableSockpuppetDisallowedAuditEntry']
+    ] = Field('RepoConfigDisableSockpuppetDisallowedAuditEntry', alias='__typename')
+
+
+class RepoConfigEnableAnonymousGitAccessAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.enable_anonymous_git_access event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigEnableAnonymousGitAccessAuditEntry']
+    ] = Field('RepoConfigEnableAnonymousGitAccessAuditEntry', alias='__typename')
+
+
+class RepoConfigEnableCollaboratorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.enable_collaborators_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigEnableCollaboratorsOnlyAuditEntry']
+    ] = Field('RepoConfigEnableCollaboratorsOnlyAuditEntry', alias='__typename')
+
+
+class RepoConfigEnableContributorsOnlyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.enable_contributors_only event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepoConfigEnableContributorsOnlyAuditEntry']] = Field(
+        'RepoConfigEnableContributorsOnlyAuditEntry', alias='__typename'
+    )
+
+
+class RepoConfigEnableSockpuppetDisallowedAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.enable_sockpuppet_disallowed event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigEnableSockpuppetDisallowedAuditEntry']
+    ] = Field('RepoConfigEnableSockpuppetDisallowedAuditEntry', alias='__typename')
+
+
+class RepoConfigLockAnonymousGitAccessAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.lock_anonymous_git_access event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepoConfigLockAnonymousGitAccessAuditEntry']] = Field(
+        'RepoConfigLockAnonymousGitAccessAuditEntry', alias='__typename'
+    )
+
+
+class RepoConfigUnlockAnonymousGitAccessAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.config.unlock_anonymous_git_access event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepoConfigUnlockAnonymousGitAccessAuditEntry']
+    ] = Field('RepoConfigUnlockAnonymousGitAccessAuditEntry', alias='__typename')
+
+
+class RepoCreateAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.create event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    forkParentName: Optional[String] = None
+    forkSourceName: Optional[String] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoCreateAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoCreateAuditEntry']] = Field(
+        'RepoCreateAuditEntry', alias='__typename'
+    )
+
+
+class RepoDestroyAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.destroy event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoDestroyAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoDestroyAuditEntry']] = Field(
+        'RepoDestroyAuditEntry', alias='__typename'
+    )
+
+
+class RepoRemoveMemberAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, RepositoryAuditEntryData
+):
+    """
+    Audit log entry for a repo.remove_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    visibility: Optional[RepoRemoveMemberAuditEntryVisibility] = None
+    typename__: Optional[Literal['RepoRemoveMemberAuditEntry']] = Field(
+        'RepoRemoveMemberAuditEntry', alias='__typename'
+    )
+
+
+class RepoRemoveTopicAuditEntry(
+    AuditEntry,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+    TopicAuditEntryData,
+):
+    """
+    Audit log entry for a repo.remove_topic event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    topic: Optional[Topic] = None
+    topicName: Optional[String] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepoRemoveTopicAuditEntry']] = Field(
+        'RepoRemoveTopicAuditEntry', alias='__typename'
+    )
+
+
+class Repository(
+    Node,
+    PackageOwner,
+    ProjectOwner,
+    ProjectV2Recent,
+    RepositoryInfo,
+    Starrable,
+    Subscribable,
+    UniformResourceLocatable,
+):
+    """
+    A repository contains the content for a project.
+    """
+
+    allowUpdateBranch: Boolean
+    archivedAt: Optional[DateTime] = None
+    assignableUsers: UserConnection
+    autoMergeAllowed: Boolean
+    branchProtectionRules: BranchProtectionRuleConnection
+    codeOfConduct: Optional[CodeOfConduct] = None
+    codeowners: Optional[RepositoryCodeowners] = None
+    collaborators: Optional[RepositoryCollaboratorConnection] = None
+    commitComments: CommitCommentConnection
+    contactLinks: Optional[List[RepositoryContactLink]] = Field(default_factory=list)
+    contributingGuidelines: Optional[ContributingGuidelines] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    defaultBranchRef: Optional[Ref] = None
+    deleteBranchOnMerge: Boolean
+    dependencyGraphManifests: Optional[DependencyGraphManifestConnection] = None
+    deployKeys: DeployKeyConnection
+    deployments: DeploymentConnection
+    description: Optional[String] = None
+    descriptionHTML: HTML
+    discussion: Optional[Discussion] = None
+    discussionCategories: DiscussionCategoryConnection
+    discussionCategory: Optional[DiscussionCategory] = None
+    discussions: DiscussionConnection
+    diskUsage: Optional[Int] = None
+    environment: Optional[Environment] = None
+    environments: EnvironmentConnection
+    forkCount: Int
+    forkingAllowed: Boolean
+    forks: RepositoryConnection
+    fundingLinks: List[FundingLink]
+    hasDiscussionsEnabled: Boolean
+    hasIssuesEnabled: Boolean
+    hasProjectsEnabled: Boolean
+    hasVulnerabilityAlertsEnabled: Boolean
+    hasWikiEnabled: Boolean
+    homepageUrl: Optional[URI] = None
+    id: ID
+    interactionAbility: Optional[RepositoryInteractionAbility] = None
+    isArchived: Boolean
+    isBlankIssuesEnabled: Boolean
+    isDisabled: Boolean
+    isEmpty: Boolean
+    isFork: Boolean
+    isInOrganization: Boolean
+    isLocked: Boolean
+    isMirror: Boolean
+    isPrivate: Boolean
+    isSecurityPolicyEnabled: Optional[Boolean] = None
+    isTemplate: Boolean
+    isUserConfigurationRepository: Boolean
+    issue: Optional[Issue] = None
+    issueOrPullRequest: Optional[IssueOrPullRequest] = None
+    issueTemplates: Optional[List[IssueTemplate]] = Field(default_factory=list)
+    issues: IssueConnection
+    label: Optional[Label] = None
+    labels: Optional[LabelConnection] = None
+    languages: Optional[LanguageConnection] = None
+    latestRelease: Optional[Release] = None
+    licenseInfo: Optional[License] = None
+    lockReason: Optional[RepositoryLockReason] = None
+    mentionableUsers: UserConnection
+    mergeCommitAllowed: Boolean
+    mergeCommitMessage: MergeCommitMessage
+    mergeCommitTitle: MergeCommitTitle
+    mergeQueue: Optional[MergeQueue] = None
+    milestone: Optional[Milestone] = None
+    milestones: Optional[MilestoneConnection] = None
+    mirrorUrl: Optional[URI] = None
+    name: String
+    nameWithOwner: String
+    object: Optional[GitObject] = None
+    openGraphImageUrl: URI
+    owner: RepositoryOwner
+    packages: PackageConnection
+    parent: Optional[Repository] = None
+    pinnedDiscussions: PinnedDiscussionConnection
+    pinnedIssues: Optional[PinnedIssueConnection] = None
+    primaryLanguage: Optional[Language] = None
+    project: Optional[Project] = None
+    projectV2: Optional[ProjectV2] = None
+    projects: ProjectConnection
+    projectsResourcePath: URI
+    projectsUrl: URI
+    projectsV2: ProjectV2Connection
+    pullRequest: Optional[PullRequest] = None
+    pullRequestTemplates: Optional[List[PullRequestTemplate]] = Field(
+        default_factory=list
+    )
+    pullRequests: PullRequestConnection
+    pushedAt: Optional[DateTime] = None
+    rebaseMergeAllowed: Boolean
+    recentProjects: ProjectV2Connection
+    ref: Optional[Ref] = None
+    refs: Optional[RefConnection] = None
+    release: Optional[Release] = None
+    releases: ReleaseConnection
+    repositoryTopics: RepositoryTopicConnection
+    resourcePath: URI
+    ruleset: Optional[RepositoryRuleset] = None
+    rulesets: Optional[RepositoryRulesetConnection] = None
+    securityPolicyUrl: Optional[URI] = None
+    shortDescriptionHTML: HTML
+    squashMergeAllowed: Boolean
+    squashMergeCommitMessage: SquashMergeCommitMessage
+    squashMergeCommitTitle: SquashMergeCommitTitle
+    squashPrTitleUsedAsDefault: Boolean
+    sshUrl: GitSSHRemote
+    stargazerCount: Int
+    stargazers: StargazerConnection
+    submodules: SubmoduleConnection
+    tempCloneToken: Optional[String] = None
+    templateRepository: Optional[Repository] = None
+    updatedAt: DateTime
+    url: URI
+    usesCustomOpenGraphImage: Boolean
+    viewerCanAdminister: Boolean
+    viewerCanCreateProjects: Boolean
+    viewerCanSubscribe: Boolean
+    viewerCanUpdateTopics: Boolean
+    viewerDefaultCommitEmail: Optional[String] = None
+    viewerDefaultMergeMethod: PullRequestMergeMethod
+    viewerHasStarred: Boolean
+    viewerPermission: Optional[RepositoryPermission] = None
+    viewerPossibleCommitEmails: Optional[List[String]] = Field(default_factory=list)
+    viewerSubscription: Optional[SubscriptionState] = None
+    visibility: RepositoryVisibility
+    vulnerabilityAlert: Optional[RepositoryVulnerabilityAlert] = None
+    vulnerabilityAlerts: Optional[RepositoryVulnerabilityAlertConnection] = None
+    watchers: UserConnection
+    webCommitSignoffRequired: Boolean
+    typename__: Optional[Literal['Repository']] = Field(
+        'Repository', alias='__typename'
+    )
+
+
+class RepositoryCodeowners(BaseModel):
+    """
+    Information extracted from a repository's `CODEOWNERS` file.
+    """
+
+    errors: List[RepositoryCodeownersError]
+    typename__: Optional[Literal['RepositoryCodeowners']] = Field(
+        'RepositoryCodeowners', alias='__typename'
+    )
+
+
+class RepositoryCodeownersError(BaseModel):
+    """
+    An error in a `CODEOWNERS` file.
+    """
+
+    column: Int
+    kind: String
+    line: Int
+    message: String
+    path: String
+    source: String
+    suggestion: Optional[String] = None
+    typename__: Optional[Literal['RepositoryCodeownersError']] = Field(
+        'RepositoryCodeownersError', alias='__typename'
+    )
+
+
+class RepositoryCollaboratorConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[RepositoryCollaboratorEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryCollaboratorConnection']] = Field(
+        'RepositoryCollaboratorConnection', alias='__typename'
+    )
+
+
+class RepositoryCollaboratorEdge(BaseModel):
+    """
+    Represents a user who is a collaborator of a repository.
+    """
+
+    cursor: String
+    node: User
+    permission: RepositoryPermission
+    permissionSources: Optional[List[PermissionSource]] = Field(default_factory=list)
+    typename__: Optional[Literal['RepositoryCollaboratorEdge']] = Field(
+        'RepositoryCollaboratorEdge', alias='__typename'
+    )
+
+
+class RepositoryConnection(BaseModel):
+    """
+    A list of repositories owned by the subject.
+    """
+
+    edges: Optional[List[Optional[RepositoryEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Repository]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    totalDiskUsage: Int
+    typename__: Optional[Literal['RepositoryConnection']] = Field(
+        'RepositoryConnection', alias='__typename'
+    )
+
+
+class RepositoryContactLink(BaseModel):
+    """
+    A repository contact link.
+    """
+
+    about: String
+    name: String
+    url: URI
+    typename__: Optional[Literal['RepositoryContactLink']] = Field(
+        'RepositoryContactLink', alias='__typename'
+    )
+
+
+class RepositoryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Repository] = None
+    typename__: Optional[Literal['RepositoryEdge']] = Field(
+        'RepositoryEdge', alias='__typename'
+    )
+
+
+class RepositoryIdConditionTarget(BaseModel):
+    """
+    Parameters to be used for the repository_id condition
+    """
+
+    repositoryIds: List[ID]
+    typename__: Optional[Literal['RepositoryIdConditionTarget']] = Field(
+        'RepositoryIdConditionTarget', alias='__typename'
+    )
+
+
+class RepositoryInteractionAbility(BaseModel):
+    """
+    Repository interaction limit that applies to this object.
+    """
+
+    expiresAt: Optional[DateTime] = None
+    limit: RepositoryInteractionLimit
+    origin: RepositoryInteractionLimitOrigin
+    typename__: Optional[Literal['RepositoryInteractionAbility']] = Field(
+        'RepositoryInteractionAbility', alias='__typename'
+    )
+
+
+class RepositoryInvitation(Node):
+    """
+    An invitation for a user to be added to a repository.
+    """
+
+    email: Optional[String] = None
+    id: ID
+    invitee: Optional[User] = None
+    inviter: User
+    permalink: URI
+    permission: RepositoryPermission
+    repository: Optional[RepositoryInfo] = None
+    typename__: Optional[Literal['RepositoryInvitation']] = Field(
+        'RepositoryInvitation', alias='__typename'
+    )
+
+
+class RepositoryInvitationConnection(BaseModel):
+    """
+    A list of repository invitations.
+    """
+
+    edges: Optional[List[Optional[RepositoryInvitationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[RepositoryInvitation]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryInvitationConnection']] = Field(
+        'RepositoryInvitationConnection', alias='__typename'
+    )
+
+
+class RepositoryInvitationEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryInvitation] = None
+    typename__: Optional[Literal['RepositoryInvitationEdge']] = Field(
+        'RepositoryInvitationEdge', alias='__typename'
+    )
+
+
+class RepositoryMigration(Migration, Node):
+    """
+    A GitHub Enterprise Importer (GEI) repository migration.
+    """
+
+    continueOnError: Boolean
+    createdAt: DateTime
+    databaseId: Optional[String] = None
+    failureReason: Optional[String] = None
+    id: ID
+    migrationLogUrl: Optional[URI] = None
+    migrationSource: MigrationSource
+    repositoryName: String
+    sourceUrl: URI
+    state: MigrationState
+    warningsCount: Int
+    typename__: Optional[Literal['RepositoryMigration']] = Field(
+        'RepositoryMigration', alias='__typename'
+    )
+
+
+class RepositoryMigrationConnection(BaseModel):
+    """
+    The connection type for RepositoryMigration.
+    """
+
+    edges: Optional[List[Optional[RepositoryMigrationEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[RepositoryMigration]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryMigrationConnection']] = Field(
+        'RepositoryMigrationConnection', alias='__typename'
+    )
+
+
+class RepositoryMigrationEdge(BaseModel):
+    """
+    Represents a repository migration.
+    """
+
+    cursor: String
+    node: Optional[RepositoryMigration] = None
+    typename__: Optional[Literal['RepositoryMigrationEdge']] = Field(
+        'RepositoryMigrationEdge', alias='__typename'
+    )
+
+
+class RepositoryNameConditionTarget(BaseModel):
+    """
+    Parameters to be used for the repository_name condition
+    """
+
+    exclude: List[String]
+    include: List[String]
+    protected: Boolean
+    typename__: Optional[Literal['RepositoryNameConditionTarget']] = Field(
+        'RepositoryNameConditionTarget', alias='__typename'
+    )
+
+
+class RepositoryRule(Node):
+    """
+    A repository rule.
+    """
+
+    id: ID
+    parameters: Optional[RuleParameters] = None
+    repositoryRuleset: Optional[RepositoryRuleset] = None
+    type: RepositoryRuleType
+    typename__: Optional[Literal['RepositoryRule']] = Field(
+        'RepositoryRule', alias='__typename'
+    )
+
+
+class RepositoryRuleConditions(BaseModel):
+    """
+    Set of conditions that determine if a ruleset will evaluate
+    """
+
+    refName: Optional[RefNameConditionTarget] = None
+    repositoryId: Optional[RepositoryIdConditionTarget] = None
+    repositoryName: Optional[RepositoryNameConditionTarget] = None
+    typename__: Optional[Literal['RepositoryRuleConditions']] = Field(
+        'RepositoryRuleConditions', alias='__typename'
+    )
+
+
+class RepositoryRuleConnection(BaseModel):
+    """
+    The connection type for RepositoryRule.
+    """
+
+    edges: Optional[List[Optional[RepositoryRuleEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[RepositoryRule]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryRuleConnection']] = Field(
+        'RepositoryRuleConnection', alias='__typename'
+    )
+
+
+class RepositoryRuleEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryRule] = None
+    typename__: Optional[Literal['RepositoryRuleEdge']] = Field(
+        'RepositoryRuleEdge', alias='__typename'
+    )
+
+
+class RepositoryRuleset(Node):
+    """
+    A repository ruleset.
+    """
+
+    bypassActors: Optional[RepositoryRulesetBypassActorConnection] = None
+    conditions: RepositoryRuleConditions
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    enforcement: RuleEnforcement
+    id: ID
+    name: String
+    rules: Optional[RepositoryRuleConnection] = None
+    source: RuleSource
+    target: Optional[RepositoryRulesetTarget] = None
+    updatedAt: DateTime
+    typename__: Optional[Literal['RepositoryRuleset']] = Field(
+        'RepositoryRuleset', alias='__typename'
+    )
+
+
+class RepositoryRulesetBypassActor(Node):
+    """
+    A team or app that has the ability to bypass a rules defined on a ruleset
+    """
+
+    actor: Optional[BypassActor] = None
+    bypassMode: Optional[RepositoryRulesetBypassActorBypassMode] = None
+    id: ID
+    organizationAdmin: Boolean
+    repositoryRoleDatabaseId: Optional[Int] = None
+    repositoryRoleName: Optional[String] = None
+    repositoryRuleset: Optional[RepositoryRuleset] = None
+    typename__: Optional[Literal['RepositoryRulesetBypassActor']] = Field(
+        'RepositoryRulesetBypassActor', alias='__typename'
+    )
+
+
+class RepositoryRulesetBypassActorConnection(BaseModel):
+    """
+    The connection type for RepositoryRulesetBypassActor.
+    """
+
+    edges: Optional[List[Optional[RepositoryRulesetBypassActorEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[RepositoryRulesetBypassActor]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryRulesetBypassActorConnection']] = Field(
+        'RepositoryRulesetBypassActorConnection', alias='__typename'
+    )
+
+
+class RepositoryRulesetBypassActorEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryRulesetBypassActor] = None
+    typename__: Optional[Literal['RepositoryRulesetBypassActorEdge']] = Field(
+        'RepositoryRulesetBypassActorEdge', alias='__typename'
+    )
+
+
+class RepositoryRulesetConnection(BaseModel):
+    """
+    The connection type for RepositoryRuleset.
+    """
+
+    edges: Optional[List[Optional[RepositoryRulesetEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[RepositoryRuleset]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryRulesetConnection']] = Field(
+        'RepositoryRulesetConnection', alias='__typename'
+    )
+
+
+class RepositoryRulesetEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryRuleset] = None
+    typename__: Optional[Literal['RepositoryRulesetEdge']] = Field(
+        'RepositoryRulesetEdge', alias='__typename'
+    )
+
+
+class RepositoryTopic(Node, UniformResourceLocatable):
+    """
+    A repository-topic connects a repository to a topic.
+    """
+
+    id: ID
+    resourcePath: URI
+    topic: Topic
+    url: URI
+    typename__: Optional[Literal['RepositoryTopic']] = Field(
+        'RepositoryTopic', alias='__typename'
+    )
+
+
+class RepositoryTopicConnection(BaseModel):
+    """
+    The connection type for RepositoryTopic.
+    """
+
+    edges: Optional[List[Optional[RepositoryTopicEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[RepositoryTopic]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryTopicConnection']] = Field(
+        'RepositoryTopicConnection', alias='__typename'
+    )
+
+
+class RepositoryTopicEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryTopic] = None
+    typename__: Optional[Literal['RepositoryTopicEdge']] = Field(
+        'RepositoryTopicEdge', alias='__typename'
+    )
+
+
+class RepositoryVisibilityChangeDisableAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a repository_visibility_change.disable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[
+        Literal['RepositoryVisibilityChangeDisableAuditEntry']
+    ] = Field('RepositoryVisibilityChangeDisableAuditEntry', alias='__typename')
+
+
+class RepositoryVisibilityChangeEnableAuditEntry(
+    AuditEntry, EnterpriseAuditEntryData, Node, OrganizationAuditEntryData
+):
+    """
+    Audit log entry for a repository_visibility_change.enable event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    enterpriseResourcePath: Optional[URI] = None
+    enterpriseSlug: Optional[String] = None
+    enterpriseUrl: Optional[URI] = None
+    id: ID
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['RepositoryVisibilityChangeEnableAuditEntry']] = Field(
+        'RepositoryVisibilityChangeEnableAuditEntry', alias='__typename'
+    )
+
+
+class RepositoryVulnerabilityAlert(Node, RepositoryNode):
+    """
+    A Dependabot alert for a repository with a dependency affected by a security vulnerability.
+    """
+
+    autoDismissedAt: Optional[DateTime] = None
+    createdAt: DateTime
+    dependabotUpdate: Optional[DependabotUpdate] = None
+    dependencyScope: Optional[RepositoryVulnerabilityAlertDependencyScope] = None
+    dismissComment: Optional[String] = None
+    dismissReason: Optional[String] = None
+    dismissedAt: Optional[DateTime] = None
+    dismisser: Optional[User] = None
+    fixedAt: Optional[DateTime] = None
+    id: ID
+    number: Int
+    repository: Repository
+    securityAdvisory: Optional[SecurityAdvisory] = None
+    securityVulnerability: Optional[SecurityVulnerability] = None
+    state: RepositoryVulnerabilityAlertState
+    vulnerableManifestFilename: String
+    vulnerableManifestPath: String
+    vulnerableRequirements: Optional[String] = None
+    typename__: Optional[Literal['RepositoryVulnerabilityAlert']] = Field(
+        'RepositoryVulnerabilityAlert', alias='__typename'
+    )
+
+
+class RepositoryVulnerabilityAlertConnection(BaseModel):
+    """
+    The connection type for RepositoryVulnerabilityAlert.
+    """
+
+    edges: Optional[List[Optional[RepositoryVulnerabilityAlertEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[RepositoryVulnerabilityAlert]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RepositoryVulnerabilityAlertConnection']] = Field(
+        'RepositoryVulnerabilityAlertConnection', alias='__typename'
+    )
+
+
+class RepositoryVulnerabilityAlertEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RepositoryVulnerabilityAlert] = None
+    typename__: Optional[Literal['RepositoryVulnerabilityAlertEdge']] = Field(
+        'RepositoryVulnerabilityAlertEdge', alias='__typename'
+    )
+
+
+class RequestReviewsPayload(BaseModel):
+    """
+    Autogenerated return type of RequestReviews
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    requestedReviewersEdge: Optional[UserEdge] = None
+    typename__: Optional[Literal['RequestReviewsPayload']] = Field(
+        'RequestReviewsPayload', alias='__typename'
+    )
+
+
+class RequestedReviewerConnection(BaseModel):
+    """
+    The connection type for RequestedReviewer.
+    """
+
+    edges: Optional[List[Optional[RequestedReviewerEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[RequestedReviewer]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['RequestedReviewerConnection']] = Field(
+        'RequestedReviewerConnection', alias='__typename'
+    )
+
+
+class RequestedReviewerEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[RequestedReviewer] = None
+    typename__: Optional[Literal['RequestedReviewerEdge']] = Field(
+        'RequestedReviewerEdge', alias='__typename'
+    )
+
+
+class RequiredDeploymentsParameters(BaseModel):
+    """
+    Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+    """
+
+    requiredDeploymentEnvironments: List[String]
+    typename__: Optional[Literal['RequiredDeploymentsParameters']] = Field(
+        'RequiredDeploymentsParameters', alias='__typename'
+    )
+
+
+class RequiredStatusCheckDescription(BaseModel):
+    """
+    Represents a required status check for a protected branch, but not any specific run of that check.
+    """
+
+    app: Optional[App] = None
+    context: String
+    typename__: Optional[Literal['RequiredStatusCheckDescription']] = Field(
+        'RequiredStatusCheckDescription', alias='__typename'
+    )
+
+
+class RequiredStatusChecksParameters(BaseModel):
+    """
+    Choose which status checks must pass before the ref is updated. When enabled,
+    commits must first be pushed to another ref where the checks pass.
+    """
+
+    requiredStatusChecks: List[StatusCheckConfiguration]
+    strictRequiredStatusChecksPolicy: Boolean
+    typename__: Optional[Literal['RequiredStatusChecksParameters']] = Field(
+        'RequiredStatusChecksParameters', alias='__typename'
+    )
+
+
+class RerequestCheckSuitePayload(BaseModel):
+    """
+    Autogenerated return type of RerequestCheckSuite
+    """
+
+    checkSuite: Optional[CheckSuite] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['RerequestCheckSuitePayload']] = Field(
+        'RerequestCheckSuitePayload', alias='__typename'
+    )
+
+
+class ResolveReviewThreadPayload(BaseModel):
+    """
+    Autogenerated return type of ResolveReviewThread
+    """
+
+    clientMutationId: Optional[String] = None
+    thread: Optional[PullRequestReviewThread] = None
+    typename__: Optional[Literal['ResolveReviewThreadPayload']] = Field(
+        'ResolveReviewThreadPayload', alias='__typename'
+    )
+
+
+class RestrictedContribution(Contribution):
+    """
+    Represents a private contribution a user made on GitHub.
+    """
+
+    isRestricted: Boolean
+    occurredAt: DateTime
+    resourcePath: URI
+    url: URI
+    user: User
+    typename__: Optional[Literal['RestrictedContribution']] = Field(
+        'RestrictedContribution', alias='__typename'
+    )
+
+
+class RetireSponsorsTierPayload(BaseModel):
+    """
+    Autogenerated return type of RetireSponsorsTier
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsTier: Optional[SponsorsTier] = None
+    typename__: Optional[Literal['RetireSponsorsTierPayload']] = Field(
+        'RetireSponsorsTierPayload', alias='__typename'
+    )
+
+
+class RevertPullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of RevertPullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    revertPullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['RevertPullRequestPayload']] = Field(
+        'RevertPullRequestPayload', alias='__typename'
+    )
+
+
+class ReviewDismissalAllowance(Node):
+    """
+    A user, team, or app who has the ability to dismiss a review on a protected branch.
+    """
+
+    actor: Optional[ReviewDismissalAllowanceActor] = None
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    id: ID
+    typename__: Optional[Literal['ReviewDismissalAllowance']] = Field(
+        'ReviewDismissalAllowance', alias='__typename'
+    )
+
+
+class ReviewDismissalAllowanceConnection(BaseModel):
+    """
+    The connection type for ReviewDismissalAllowance.
+    """
+
+    edges: Optional[List[Optional[ReviewDismissalAllowanceEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[ReviewDismissalAllowance]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReviewDismissalAllowanceConnection']] = Field(
+        'ReviewDismissalAllowanceConnection', alias='__typename'
+    )
+
+
+class ReviewDismissalAllowanceEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ReviewDismissalAllowance] = None
+    typename__: Optional[Literal['ReviewDismissalAllowanceEdge']] = Field(
+        'ReviewDismissalAllowanceEdge', alias='__typename'
+    )
+
+
+class ReviewDismissedEvent(Node, UniformResourceLocatable):
+    """
+    Represents a 'review_dismissed' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    dismissalMessage: Optional[String] = None
+    dismissalMessageHTML: Optional[String] = None
+    id: ID
+    previousReviewState: PullRequestReviewState
+    pullRequest: PullRequest
+    pullRequestCommit: Optional[PullRequestCommit] = None
+    resourcePath: URI
+    review: Optional[PullRequestReview] = None
+    url: URI
+    typename__: Optional[Literal['ReviewDismissedEvent']] = Field(
+        'ReviewDismissedEvent', alias='__typename'
+    )
+
+
+class ReviewRequest(Node):
+    """
+    A request for a user to review a pull request.
+    """
+
+    asCodeOwner: Boolean
+    databaseId: Optional[Int] = None
+    id: ID
+    pullRequest: PullRequest
+    requestedReviewer: Optional[RequestedReviewer] = None
+    typename__: Optional[Literal['ReviewRequest']] = Field(
+        'ReviewRequest', alias='__typename'
+    )
+
+
+class ReviewRequestConnection(BaseModel):
+    """
+    The connection type for ReviewRequest.
+    """
+
+    edges: Optional[List[Optional[ReviewRequestEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[ReviewRequest]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['ReviewRequestConnection']] = Field(
+        'ReviewRequestConnection', alias='__typename'
+    )
+
+
+class ReviewRequestEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[ReviewRequest] = None
+    typename__: Optional[Literal['ReviewRequestEdge']] = Field(
+        'ReviewRequestEdge', alias='__typename'
+    )
+
+
+class ReviewRequestRemovedEvent(Node):
+    """
+    Represents an 'review_request_removed' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    requestedReviewer: Optional[RequestedReviewer] = None
+    typename__: Optional[Literal['ReviewRequestRemovedEvent']] = Field(
+        'ReviewRequestRemovedEvent', alias='__typename'
+    )
+
+
+class ReviewRequestedEvent(Node):
+    """
+    Represents an 'review_requested' event on a given pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    pullRequest: PullRequest
+    requestedReviewer: Optional[RequestedReviewer] = None
+    typename__: Optional[Literal['ReviewRequestedEvent']] = Field(
+        'ReviewRequestedEvent', alias='__typename'
+    )
+
+
+class ReviewStatusHovercardContext(HovercardContext):
+    """
+    A hovercard context with a message describing the current code review state of the pull
+    request.
+    """
+
+    message: String
+    octicon: String
+    reviewDecision: Optional[PullRequestReviewDecision] = None
+    typename__: Optional[Literal['ReviewStatusHovercardContext']] = Field(
+        'ReviewStatusHovercardContext', alias='__typename'
+    )
+
+
+class RevokeEnterpriseOrganizationsMigratorRolePayload(BaseModel):
+    """
+    Autogenerated return type of RevokeEnterpriseOrganizationsMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    organizations: Optional[OrganizationConnection] = None
+    typename__: Optional[
+        Literal['RevokeEnterpriseOrganizationsMigratorRolePayload']
+    ] = Field('RevokeEnterpriseOrganizationsMigratorRolePayload', alias='__typename')
+
+
+class RevokeMigratorRolePayload(BaseModel):
+    """
+    Autogenerated return type of RevokeMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    success: Optional[Boolean] = None
+    typename__: Optional[Literal['RevokeMigratorRolePayload']] = Field(
+        'RevokeMigratorRolePayload', alias='__typename'
+    )
+
+
+class SavedReply(Node):
+    """
+    A Saved Reply is text a user can use to reply quickly.
+    """
+
+    body: String
+    bodyHTML: HTML
+    databaseId: Optional[Int] = None
+    id: ID
+    title: String
+    user: Optional[Actor] = None
+    typename__: Optional[Literal['SavedReply']] = Field(
+        'SavedReply', alias='__typename'
+    )
+
+
+class SavedReplyConnection(BaseModel):
+    """
+    The connection type for SavedReply.
+    """
+
+    edges: Optional[List[Optional[SavedReplyEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SavedReply]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SavedReplyConnection']] = Field(
+        'SavedReplyConnection', alias='__typename'
+    )
+
+
+class SavedReplyEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SavedReply] = None
+    typename__: Optional[Literal['SavedReplyEdge']] = Field(
+        'SavedReplyEdge', alias='__typename'
+    )
+
+
+class SearchResultItemConnection(BaseModel):
+    """
+    A list of results that matched against a search query. Regardless of the number
+    of matches, a maximum of 1,000 results will be available across all types,
+    potentially split across many pages.
+    """
+
+    codeCount: Int
+    discussionCount: Int
+    edges: Optional[List[Optional[SearchResultItemEdge]]] = Field(default_factory=list)
+    issueCount: Int
+    nodes: Optional[List[Optional[SearchResultItem]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    repositoryCount: Int
+    userCount: Int
+    wikiCount: Int
+    typename__: Optional[Literal['SearchResultItemConnection']] = Field(
+        'SearchResultItemConnection', alias='__typename'
+    )
+
+
+class SearchResultItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SearchResultItem] = None
+    textMatches: Optional[List[Optional[TextMatch]]] = Field(default_factory=list)
+    typename__: Optional[Literal['SearchResultItemEdge']] = Field(
+        'SearchResultItemEdge', alias='__typename'
+    )
+
+
+class SecurityAdvisory(Node):
+    """
+    A GitHub Security Advisory
+    """
+
+    classification: SecurityAdvisoryClassification
+    cvss: CVSS
+    cwes: CWEConnection
+    databaseId: Optional[Int] = None
+    description: String
+    ghsaId: String
+    id: ID
+    identifiers: List[SecurityAdvisoryIdentifier]
+    notificationsPermalink: Optional[URI] = None
+    origin: String
+    permalink: Optional[URI] = None
+    publishedAt: DateTime
+    references: List[SecurityAdvisoryReference]
+    severity: SecurityAdvisorySeverity
+    summary: String
+    updatedAt: DateTime
+    vulnerabilities: SecurityVulnerabilityConnection
+    withdrawnAt: Optional[DateTime] = None
+    typename__: Optional[Literal['SecurityAdvisory']] = Field(
+        'SecurityAdvisory', alias='__typename'
+    )
+
+
+class SecurityAdvisoryConnection(BaseModel):
+    """
+    The connection type for SecurityAdvisory.
+    """
+
+    edges: Optional[List[Optional[SecurityAdvisoryEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SecurityAdvisory]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SecurityAdvisoryConnection']] = Field(
+        'SecurityAdvisoryConnection', alias='__typename'
+    )
+
+
+class SecurityAdvisoryEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SecurityAdvisory] = None
+    typename__: Optional[Literal['SecurityAdvisoryEdge']] = Field(
+        'SecurityAdvisoryEdge', alias='__typename'
+    )
+
+
+class SecurityAdvisoryIdentifier(BaseModel):
+    """
+    A GitHub Security Advisory Identifier
+    """
+
+    type: String
+    value: String
+    typename__: Optional[Literal['SecurityAdvisoryIdentifier']] = Field(
+        'SecurityAdvisoryIdentifier', alias='__typename'
+    )
+
+
+class SecurityAdvisoryPackage(BaseModel):
+    """
+    An individual package
+    """
+
+    ecosystem: SecurityAdvisoryEcosystem
+    name: String
+    typename__: Optional[Literal['SecurityAdvisoryPackage']] = Field(
+        'SecurityAdvisoryPackage', alias='__typename'
+    )
+
+
+class SecurityAdvisoryPackageVersion(BaseModel):
+    """
+    An individual package version
+    """
+
+    identifier: String
+    typename__: Optional[Literal['SecurityAdvisoryPackageVersion']] = Field(
+        'SecurityAdvisoryPackageVersion', alias='__typename'
+    )
+
+
+class SecurityAdvisoryReference(BaseModel):
+    """
+    A GitHub Security Advisory Reference
+    """
+
+    url: URI
+    typename__: Optional[Literal['SecurityAdvisoryReference']] = Field(
+        'SecurityAdvisoryReference', alias='__typename'
+    )
+
+
+class SecurityVulnerability(BaseModel):
+    """
+    An individual vulnerability within an Advisory
+    """
+
+    advisory: SecurityAdvisory
+    firstPatchedVersion: Optional[SecurityAdvisoryPackageVersion] = None
+    package: SecurityAdvisoryPackage
+    severity: SecurityAdvisorySeverity
+    updatedAt: DateTime
+    vulnerableVersionRange: String
+    typename__: Optional[Literal['SecurityVulnerability']] = Field(
+        'SecurityVulnerability', alias='__typename'
+    )
+
+
+class SecurityVulnerabilityConnection(BaseModel):
+    """
+    The connection type for SecurityVulnerability.
+    """
+
+    edges: Optional[List[Optional[SecurityVulnerabilityEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[SecurityVulnerability]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SecurityVulnerabilityConnection']] = Field(
+        'SecurityVulnerabilityConnection', alias='__typename'
+    )
+
+
+class SecurityVulnerabilityEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SecurityVulnerability] = None
+    typename__: Optional[Literal['SecurityVulnerabilityEdge']] = Field(
+        'SecurityVulnerabilityEdge', alias='__typename'
+    )
+
+
+class SetEnterpriseIdentityProviderPayload(BaseModel):
+    """
+    Autogenerated return type of SetEnterpriseIdentityProvider
+    """
+
+    clientMutationId: Optional[String] = None
+    identityProvider: Optional[EnterpriseIdentityProvider] = None
+    typename__: Optional[Literal['SetEnterpriseIdentityProviderPayload']] = Field(
+        'SetEnterpriseIdentityProviderPayload', alias='__typename'
+    )
+
+
+class SetOrganizationInteractionLimitPayload(BaseModel):
+    """
+    Autogenerated return type of SetOrganizationInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[Literal['SetOrganizationInteractionLimitPayload']] = Field(
+        'SetOrganizationInteractionLimitPayload', alias='__typename'
+    )
+
+
+class SetRepositoryInteractionLimitPayload(BaseModel):
+    """
+    Autogenerated return type of SetRepositoryInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['SetRepositoryInteractionLimitPayload']] = Field(
+        'SetRepositoryInteractionLimitPayload', alias='__typename'
+    )
+
+
+class SetUserInteractionLimitPayload(BaseModel):
+    """
+    Autogenerated return type of SetUserInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    user: Optional[User] = None
+    typename__: Optional[Literal['SetUserInteractionLimitPayload']] = Field(
+        'SetUserInteractionLimitPayload', alias='__typename'
+    )
+
+
+class SmimeSignature(GitSignature):
+    """
+    Represents an S/MIME signature on a Commit or Tag.
+    """
+
+    email: String
+    isValid: Boolean
+    payload: String
+    signature: String
+    signer: Optional[User] = None
+    state: GitSignatureState
+    wasSignedByGitHub: Boolean
+    typename__: Optional[Literal['SmimeSignature']] = Field(
+        'SmimeSignature', alias='__typename'
+    )
+
+
+class SocialAccount(BaseModel):
+    """
+    Social media profile associated with a user.
+    """
+
+    displayName: String
+    provider: SocialAccountProvider
+    url: URI
+    typename__: Optional[Literal['SocialAccount']] = Field(
+        'SocialAccount', alias='__typename'
+    )
+
+
+class SocialAccountConnection(BaseModel):
+    """
+    The connection type for SocialAccount.
+    """
+
+    edges: Optional[List[Optional[SocialAccountEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SocialAccount]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SocialAccountConnection']] = Field(
+        'SocialAccountConnection', alias='__typename'
+    )
+
+
+class SocialAccountEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SocialAccount] = None
+    typename__: Optional[Literal['SocialAccountEdge']] = Field(
+        'SocialAccountEdge', alias='__typename'
+    )
+
+
+class SponsorConnection(BaseModel):
+    """
+    The connection type for Sponsor.
+    """
+
+    edges: Optional[List[Optional[SponsorEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Sponsor]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SponsorConnection']] = Field(
+        'SponsorConnection', alias='__typename'
+    )
+
+
+class SponsorEdge(BaseModel):
+    """
+    Represents a user or organization who is sponsoring someone in GitHub Sponsors.
+    """
+
+    cursor: String
+    node: Optional[Sponsor] = None
+    typename__: Optional[Literal['SponsorEdge']] = Field(
+        'SponsorEdge', alias='__typename'
+    )
+
+
+class SponsorableItemConnection(BaseModel):
+    """
+    The connection type for SponsorableItem.
+    """
+
+    edges: Optional[List[Optional[SponsorableItemEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SponsorableItem]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SponsorableItemConnection']] = Field(
+        'SponsorableItemConnection', alias='__typename'
+    )
+
+
+class SponsorableItemEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SponsorableItem] = None
+    typename__: Optional[Literal['SponsorableItemEdge']] = Field(
+        'SponsorableItemEdge', alias='__typename'
+    )
+
+
+class SponsorsActivity(Node):
+    """
+    An event related to sponsorship activity.
+    """
+
+    action: SponsorsActivityAction
+    currentPrivacyLevel: Optional[SponsorshipPrivacy] = None
+    id: ID
+    paymentSource: Optional[SponsorshipPaymentSource] = None
+    previousSponsorsTier: Optional[SponsorsTier] = None
+    sponsor: Optional[Sponsor] = None
+    sponsorable: Sponsorable
+    sponsorsTier: Optional[SponsorsTier] = None
+    timestamp: Optional[DateTime] = None
+    viaBulkSponsorship: Boolean
+    typename__: Optional[Literal['SponsorsActivity']] = Field(
+        'SponsorsActivity', alias='__typename'
+    )
+
+
+class SponsorsActivityConnection(BaseModel):
+    """
+    The connection type for SponsorsActivity.
+    """
+
+    edges: Optional[List[Optional[SponsorsActivityEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SponsorsActivity]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SponsorsActivityConnection']] = Field(
+        'SponsorsActivityConnection', alias='__typename'
+    )
+
+
+class SponsorsActivityEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SponsorsActivity] = None
+    typename__: Optional[Literal['SponsorsActivityEdge']] = Field(
+        'SponsorsActivityEdge', alias='__typename'
+    )
+
+
+class SponsorsGoal(BaseModel):
+    """
+    A goal associated with a GitHub Sponsors listing, representing a target the sponsored maintainer would like to attain.
+    """
+
+    description: Optional[String] = None
+    kind: SponsorsGoalKind
+    percentComplete: Int
+    targetValue: Int
+    title: String
+    typename__: Optional[Literal['SponsorsGoal']] = Field(
+        'SponsorsGoal', alias='__typename'
+    )
+
+
+class SponsorsListing(Node):
+    """
+    A GitHub Sponsors listing.
+    """
+
+    activeGoal: Optional[SponsorsGoal] = None
+    activeStripeConnectAccount: Optional[StripeConnectAccount] = None
+    billingCountryOrRegion: Optional[String] = None
+    contactEmailAddress: Optional[String] = None
+    createdAt: DateTime
+    dashboardResourcePath: URI
+    dashboardUrl: URI
+    featuredItems: List[SponsorsListingFeaturedItem]
+    fiscalHost: Optional[Organization] = None
+    fullDescription: String
+    fullDescriptionHTML: HTML
+    id: ID
+    isPublic: Boolean
+    name: String
+    nextPayoutDate: Optional[Date] = None
+    residenceCountryOrRegion: Optional[String] = None
+    resourcePath: URI
+    shortDescription: String
+    slug: String
+    sponsorable: Sponsorable
+    tiers: Optional[SponsorsTierConnection] = None
+    url: URI
+    typename__: Optional[Literal['SponsorsListing']] = Field(
+        'SponsorsListing', alias='__typename'
+    )
+
+
+class SponsorsListingFeaturedItem(Node):
+    """
+    A record that is promoted on a GitHub Sponsors profile.
+    """
+
+    createdAt: DateTime
+    description: Optional[String] = None
+    featureable: SponsorsListingFeatureableItem
+    id: ID
+    position: Int
+    sponsorsListing: SponsorsListing
+    updatedAt: DateTime
+    typename__: Optional[Literal['SponsorsListingFeaturedItem']] = Field(
+        'SponsorsListingFeaturedItem', alias='__typename'
+    )
+
+
+class SponsorsTier(Node):
+    """
+    A GitHub Sponsors tier associated with a GitHub Sponsors listing.
+    """
+
+    adminInfo: Optional[SponsorsTierAdminInfo] = None
+    closestLesserValueTier: Optional[SponsorsTier] = None
+    createdAt: DateTime
+    description: String
+    descriptionHTML: HTML
+    id: ID
+    isCustomAmount: Boolean
+    isOneTime: Boolean
+    monthlyPriceInCents: Int
+    monthlyPriceInDollars: Int
+    name: String
+    sponsorsListing: SponsorsListing
+    updatedAt: DateTime
+    typename__: Optional[Literal['SponsorsTier']] = Field(
+        'SponsorsTier', alias='__typename'
+    )
+
+
+class SponsorsTierAdminInfo(BaseModel):
+    """
+    SponsorsTier information only visible to users that can administer the associated Sponsors listing.
+    """
+
+    isDraft: Boolean
+    isPublished: Boolean
+    isRetired: Boolean
+    sponsorships: SponsorshipConnection
+    typename__: Optional[Literal['SponsorsTierAdminInfo']] = Field(
+        'SponsorsTierAdminInfo', alias='__typename'
+    )
+
+
+class SponsorsTierConnection(BaseModel):
+    """
+    The connection type for SponsorsTier.
+    """
+
+    edges: Optional[List[Optional[SponsorsTierEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[SponsorsTier]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SponsorsTierConnection']] = Field(
+        'SponsorsTierConnection', alias='__typename'
+    )
+
+
+class SponsorsTierEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SponsorsTier] = None
+    typename__: Optional[Literal['SponsorsTierEdge']] = Field(
+        'SponsorsTierEdge', alias='__typename'
+    )
+
+
+class Sponsorship(Node):
+    """
+    A sponsorship relationship between a sponsor and a maintainer
+    """
+
+    createdAt: DateTime
+    id: ID
+    isActive: Boolean
+    isOneTimePayment: Boolean
+    isSponsorOptedIntoEmail: Optional[Boolean] = None
+    maintainer: User
+    paymentSource: Optional[SponsorshipPaymentSource] = None
+    privacyLevel: SponsorshipPrivacy
+    sponsor: Optional[User] = None
+    sponsorEntity: Optional[Sponsor] = None
+    sponsorable: Sponsorable
+    tier: Optional[SponsorsTier] = None
+    tierSelectedAt: Optional[DateTime] = None
+    typename__: Optional[Literal['Sponsorship']] = Field(
+        'Sponsorship', alias='__typename'
+    )
+
+
+class SponsorshipConnection(BaseModel):
+    """
+    The connection type for Sponsorship.
+    """
+
+    edges: Optional[List[Optional[SponsorshipEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Sponsorship]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    totalRecurringMonthlyPriceInCents: Int
+    totalRecurringMonthlyPriceInDollars: Int
+    typename__: Optional[Literal['SponsorshipConnection']] = Field(
+        'SponsorshipConnection', alias='__typename'
+    )
+
+
+class SponsorshipEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Sponsorship] = None
+    typename__: Optional[Literal['SponsorshipEdge']] = Field(
+        'SponsorshipEdge', alias='__typename'
+    )
+
+
+class SponsorshipNewsletter(Node):
+    """
+    An update sent to sponsors of a user or organization on GitHub Sponsors.
+    """
+
+    author: Optional[User] = None
+    body: String
+    createdAt: DateTime
+    id: ID
+    isPublished: Boolean
+    sponsorable: Sponsorable
+    subject: String
+    updatedAt: DateTime
+    typename__: Optional[Literal['SponsorshipNewsletter']] = Field(
+        'SponsorshipNewsletter', alias='__typename'
+    )
+
+
+class SponsorshipNewsletterConnection(BaseModel):
+    """
+    The connection type for SponsorshipNewsletter.
+    """
+
+    edges: Optional[List[Optional[SponsorshipNewsletterEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[SponsorshipNewsletter]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SponsorshipNewsletterConnection']] = Field(
+        'SponsorshipNewsletterConnection', alias='__typename'
+    )
+
+
+class SponsorshipNewsletterEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[SponsorshipNewsletter] = None
+    typename__: Optional[Literal['SponsorshipNewsletterEdge']] = Field(
+        'SponsorshipNewsletterEdge', alias='__typename'
+    )
+
+
+class SshSignature(GitSignature):
+    """
+    Represents an SSH signature on a Commit or Tag.
+    """
+
+    email: String
+    isValid: Boolean
+    keyFingerprint: Optional[String] = None
+    payload: String
+    signature: String
+    signer: Optional[User] = None
+    state: GitSignatureState
+    wasSignedByGitHub: Boolean
+    typename__: Optional[Literal['SshSignature']] = Field(
+        'SshSignature', alias='__typename'
+    )
+
+
+class StargazerConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[StargazerEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['StargazerConnection']] = Field(
+        'StargazerConnection', alias='__typename'
+    )
+
+
+class StargazerEdge(BaseModel):
+    """
+    Represents a user that's starred a repository.
+    """
+
+    cursor: String
+    node: User
+    starredAt: DateTime
+    typename__: Optional[Literal['StargazerEdge']] = Field(
+        'StargazerEdge', alias='__typename'
+    )
+
+
+class StarredRepositoryConnection(BaseModel):
+    """
+    The connection type for Repository.
+    """
+
+    edges: Optional[List[Optional[StarredRepositoryEdge]]] = Field(default_factory=list)
+    isOverLimit: Boolean
+    nodes: Optional[List[Optional[Repository]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['StarredRepositoryConnection']] = Field(
+        'StarredRepositoryConnection', alias='__typename'
+    )
+
+
+class StarredRepositoryEdge(BaseModel):
+    """
+    Represents a starred repository.
+    """
+
+    cursor: String
+    node: Repository
+    starredAt: DateTime
+    typename__: Optional[Literal['StarredRepositoryEdge']] = Field(
+        'StarredRepositoryEdge', alias='__typename'
+    )
+
+
+class StartOrganizationMigrationPayload(BaseModel):
+    """
+    Autogenerated return type of StartOrganizationMigration
+    """
+
+    clientMutationId: Optional[String] = None
+    orgMigration: Optional[OrganizationMigration] = None
+    typename__: Optional[Literal['StartOrganizationMigrationPayload']] = Field(
+        'StartOrganizationMigrationPayload', alias='__typename'
+    )
+
+
+class StartRepositoryMigrationPayload(BaseModel):
+    """
+    Autogenerated return type of StartRepositoryMigration
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryMigration: Optional[RepositoryMigration] = None
+    typename__: Optional[Literal['StartRepositoryMigrationPayload']] = Field(
+        'StartRepositoryMigrationPayload', alias='__typename'
+    )
+
+
+class Status(Node):
+    """
+    Represents a commit status.
+    """
+
+    combinedContexts: StatusCheckRollupContextConnection
+    commit: Optional[Commit] = None
+    context: Optional[StatusContext] = None
+    contexts: List[StatusContext]
+    id: ID
+    state: StatusState
+    typename__: Optional[Literal['Status']] = Field('Status', alias='__typename')
+
+
+class StatusCheckConfiguration(BaseModel):
+    """
+    Required status check
+    """
+
+    context: String
+    integrationId: Optional[Int] = None
+    typename__: Optional[Literal['StatusCheckConfiguration']] = Field(
+        'StatusCheckConfiguration', alias='__typename'
+    )
+
+
+class StatusCheckRollup(Node):
+    """
+    Represents the rollup for both the check runs and status for a commit.
+    """
+
+    commit: Optional[Commit] = None
+    contexts: StatusCheckRollupContextConnection
+    id: ID
+    state: StatusState
+    typename__: Optional[Literal['StatusCheckRollup']] = Field(
+        'StatusCheckRollup', alias='__typename'
+    )
+
+
+class StatusCheckRollupContextConnection(BaseModel):
+    """
+    The connection type for StatusCheckRollupContext.
+    """
+
+    checkRunCount: Int
+    checkRunCountsByState: Optional[List[CheckRunStateCount]] = Field(
+        default_factory=list
+    )
+    edges: Optional[List[Optional[StatusCheckRollupContextEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[StatusCheckRollupContext]]] = Field(
+        default_factory=list
+    )
+    pageInfo: PageInfo
+    statusContextCount: Int
+    statusContextCountsByState: Optional[List[StatusContextStateCount]] = Field(
+        default_factory=list
+    )
+    totalCount: Int
+    typename__: Optional[Literal['StatusCheckRollupContextConnection']] = Field(
+        'StatusCheckRollupContextConnection', alias='__typename'
+    )
+
+
+class StatusCheckRollupContextEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[StatusCheckRollupContext] = None
+    typename__: Optional[Literal['StatusCheckRollupContextEdge']] = Field(
+        'StatusCheckRollupContextEdge', alias='__typename'
+    )
+
+
+class StatusContext(Node, RequirableByPullRequest):
+    """
+    Represents an individual commit status context
+    """
+
+    avatarUrl: Optional[URI] = None
+    commit: Optional[Commit] = None
+    context: String
+    createdAt: DateTime
+    creator: Optional[Actor] = None
+    description: Optional[String] = None
+    id: ID
+    isRequired: Boolean
+    state: StatusState
+    targetUrl: Optional[URI] = None
+    typename__: Optional[Literal['StatusContext']] = Field(
+        'StatusContext', alias='__typename'
+    )
+
+
+class StatusContextStateCount(BaseModel):
+    """
+    Represents a count of the state of a status context.
+    """
+
+    count: Int
+    state: StatusState
+    typename__: Optional[Literal['StatusContextStateCount']] = Field(
+        'StatusContextStateCount', alias='__typename'
+    )
+
+
+class StripeConnectAccount(BaseModel):
+    """
+    A Stripe Connect account for receiving sponsorship funds from GitHub Sponsors.
+    """
+
+    accountId: String
+    billingCountryOrRegion: Optional[String] = None
+    countryOrRegion: Optional[String] = None
+    isActive: Boolean
+    sponsorsListing: SponsorsListing
+    stripeDashboardUrl: URI
+    typename__: Optional[Literal['StripeConnectAccount']] = Field(
+        'StripeConnectAccount', alias='__typename'
+    )
+
+
+class SubmitPullRequestReviewPayload(BaseModel):
+    """
+    Autogenerated return type of SubmitPullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    typename__: Optional[Literal['SubmitPullRequestReviewPayload']] = Field(
+        'SubmitPullRequestReviewPayload', alias='__typename'
+    )
+
+
+class Submodule(BaseModel):
+    """
+    A pointer to a repository at a specific revision embedded inside another repository.
+    """
+
+    branch: Optional[String] = None
+    gitUrl: URI
+    name: String
+    nameRaw: Base64String
+    path: String
+    pathRaw: Base64String
+    subprojectCommitOid: Optional[GitObjectID] = None
+    typename__: Optional[Literal['Submodule']] = Field('Submodule', alias='__typename')
+
+
+class SubmoduleConnection(BaseModel):
+    """
+    The connection type for Submodule.
+    """
+
+    edges: Optional[List[Optional[SubmoduleEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Submodule]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['SubmoduleConnection']] = Field(
+        'SubmoduleConnection', alias='__typename'
+    )
+
+
+class SubmoduleEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Submodule] = None
+    typename__: Optional[Literal['SubmoduleEdge']] = Field(
+        'SubmoduleEdge', alias='__typename'
+    )
+
+
+class SubscribedEvent(Node):
+    """
+    Represents a 'subscribed' event on a given `Subscribable`.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    subscribable: Subscribable
+    typename__: Optional[Literal['SubscribedEvent']] = Field(
+        'SubscribedEvent', alias='__typename'
+    )
+
+
+class SuggestedReviewer(BaseModel):
+    """
+    A suggestion to review a pull request based on a user's commit history and review comments.
+    """
+
+    isAuthor: Boolean
+    isCommenter: Boolean
+    reviewer: User
+    typename__: Optional[Literal['SuggestedReviewer']] = Field(
+        'SuggestedReviewer', alias='__typename'
+    )
+
+
+class Tag(GitObject, Node):
+    """
+    Represents a Git tag.
+    """
+
+    abbreviatedOid: String
+    commitResourcePath: URI
+    commitUrl: URI
+    id: ID
+    message: Optional[String] = None
+    name: String
+    oid: GitObjectID
+    repository: Repository
+    tagger: Optional[GitActor] = None
+    target: GitObject
+    typename__: Optional[Literal['Tag']] = Field('Tag', alias='__typename')
+
+
+class TagNamePatternParameters(BaseModel):
+    """
+    Parameters to be used for the tag_name_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Boolean
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['TagNamePatternParameters']] = Field(
+        'TagNamePatternParameters', alias='__typename'
+    )
+
+
+class Team(MemberStatusable, Node, Subscribable):
+    """
+    A team of users in an organization.
+    """
+
+    ancestors: TeamConnection
+    avatarUrl: Optional[URI] = None
+    childTeams: TeamConnection
+    combinedSlug: String
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    description: Optional[String] = None
+    discussion: Optional[TeamDiscussion] = None
+    discussions: TeamDiscussionConnection
+    discussionsResourcePath: URI
+    discussionsUrl: URI
+    editTeamResourcePath: URI
+    editTeamUrl: URI
+    id: ID
+    invitations: Optional[OrganizationInvitationConnection] = None
+    memberStatuses: UserStatusConnection
+    members: TeamMemberConnection
+    membersResourcePath: URI
+    membersUrl: URI
+    name: String
+    newTeamResourcePath: URI
+    newTeamUrl: URI
+    notificationSetting: TeamNotificationSetting
+    organization: Organization
+    parentTeam: Optional[Team] = None
+    privacy: TeamPrivacy
+    projectV2: Optional[ProjectV2] = None
+    projectsV2: ProjectV2Connection
+    repositories: TeamRepositoryConnection
+    repositoriesResourcePath: URI
+    repositoriesUrl: URI
+    resourcePath: URI
+    reviewRequestDelegationAlgorithm: Optional[TeamReviewAssignmentAlgorithm] = None
+    reviewRequestDelegationEnabled: Boolean
+    reviewRequestDelegationMemberCount: Optional[Int] = None
+    reviewRequestDelegationNotifyTeam: Boolean
+    slug: String
+    teamsResourcePath: URI
+    teamsUrl: URI
+    updatedAt: DateTime
+    url: URI
+    viewerCanAdminister: Boolean
+    viewerCanSubscribe: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    typename__: Optional[Literal['Team']] = Field('Team', alias='__typename')
+
+
+class TeamAddMemberAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, TeamAuditEntryData
+):
+    """
+    Audit log entry for a team.add_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isLdapMapped: Optional[Boolean] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamAddMemberAuditEntry']] = Field(
+        'TeamAddMemberAuditEntry', alias='__typename'
+    )
+
+
+class TeamAddRepositoryAuditEntry(
+    AuditEntry,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+    TeamAuditEntryData,
+):
+    """
+    Audit log entry for a team.add_repository event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isLdapMapped: Optional[Boolean] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamAddRepositoryAuditEntry']] = Field(
+        'TeamAddRepositoryAuditEntry', alias='__typename'
+    )
+
+
+class TeamChangeParentTeamAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, TeamAuditEntryData
+):
+    """
+    Audit log entry for a team.change_parent_team event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isLdapMapped: Optional[Boolean] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    parentTeam: Optional[Team] = None
+    parentTeamName: Optional[String] = None
+    parentTeamNameWas: Optional[String] = None
+    parentTeamResourcePath: Optional[URI] = None
+    parentTeamUrl: Optional[URI] = None
+    parentTeamWas: Optional[Team] = None
+    parentTeamWasResourcePath: Optional[URI] = None
+    parentTeamWasUrl: Optional[URI] = None
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamChangeParentTeamAuditEntry']] = Field(
+        'TeamChangeParentTeamAuditEntry', alias='__typename'
+    )
+
+
+class TeamConnection(BaseModel):
+    """
+    The connection type for Team.
+    """
+
+    edges: Optional[List[Optional[TeamEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Team]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['TeamConnection']] = Field(
+        'TeamConnection', alias='__typename'
+    )
+
+
+class TeamDiscussion(
+    Comment,
+    Deletable,
+    Node,
+    Reactable,
+    Subscribable,
+    UniformResourceLocatable,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    A team discussion.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    bodyVersion: String
+    comments: TeamDiscussionCommentConnection
+    commentsResourcePath: URI
+    commentsUrl: URI
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    isPinned: Boolean
+    isPrivate: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    number: Int
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    resourcePath: URI
+    team: Team
+    title: String
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanPin: Boolean
+    viewerCanReact: Boolean
+    viewerCanSubscribe: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    viewerSubscription: Optional[SubscriptionState] = None
+    typename__: Optional[Literal['TeamDiscussion']] = Field(
+        'TeamDiscussion', alias='__typename'
+    )
+
+
+class TeamDiscussionComment(
+    Comment,
+    Deletable,
+    Node,
+    Reactable,
+    UniformResourceLocatable,
+    Updatable,
+    UpdatableComment,
+):
+    """
+    A comment on a team discussion.
+    """
+
+    author: Optional[Actor] = None
+    authorAssociation: CommentAuthorAssociation
+    body: String
+    bodyHTML: HTML
+    bodyText: String
+    bodyVersion: String
+    createdAt: DateTime
+    createdViaEmail: Boolean
+    databaseId: Optional[Int] = None
+    discussion: TeamDiscussion
+    editor: Optional[Actor] = None
+    id: ID
+    includesCreatedEdit: Boolean
+    lastEditedAt: Optional[DateTime] = None
+    number: Int
+    publishedAt: Optional[DateTime] = None
+    reactionGroups: Optional[List[ReactionGroup]] = Field(default_factory=list)
+    reactions: ReactionConnection
+    resourcePath: URI
+    updatedAt: DateTime
+    url: URI
+    userContentEdits: Optional[UserContentEditConnection] = None
+    viewerCanDelete: Boolean
+    viewerCanReact: Boolean
+    viewerCanUpdate: Boolean
+    viewerCannotUpdateReasons: List[CommentCannotUpdateReason]
+    viewerDidAuthor: Boolean
+    typename__: Optional[Literal['TeamDiscussionComment']] = Field(
+        'TeamDiscussionComment', alias='__typename'
+    )
+
+
+class TeamDiscussionCommentConnection(BaseModel):
+    """
+    The connection type for TeamDiscussionComment.
+    """
+
+    edges: Optional[List[Optional[TeamDiscussionCommentEdge]]] = Field(
+        default_factory=list
+    )
+    nodes: Optional[List[Optional[TeamDiscussionComment]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['TeamDiscussionCommentConnection']] = Field(
+        'TeamDiscussionCommentConnection', alias='__typename'
+    )
+
+
+class TeamDiscussionCommentEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[TeamDiscussionComment] = None
+    typename__: Optional[Literal['TeamDiscussionCommentEdge']] = Field(
+        'TeamDiscussionCommentEdge', alias='__typename'
+    )
+
+
+class TeamDiscussionConnection(BaseModel):
+    """
+    The connection type for TeamDiscussion.
+    """
+
+    edges: Optional[List[Optional[TeamDiscussionEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[TeamDiscussion]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['TeamDiscussionConnection']] = Field(
+        'TeamDiscussionConnection', alias='__typename'
+    )
+
+
+class TeamDiscussionEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[TeamDiscussion] = None
+    typename__: Optional[Literal['TeamDiscussionEdge']] = Field(
+        'TeamDiscussionEdge', alias='__typename'
+    )
+
+
+class TeamEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[Team] = None
+    typename__: Optional[Literal['TeamEdge']] = Field('TeamEdge', alias='__typename')
+
+
+class TeamMemberConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[TeamMemberEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['TeamMemberConnection']] = Field(
+        'TeamMemberConnection', alias='__typename'
+    )
+
+
+class TeamMemberEdge(BaseModel):
+    """
+    Represents a user who is a member of a team.
+    """
+
+    cursor: String
+    memberAccessResourcePath: URI
+    memberAccessUrl: URI
+    node: User
+    role: TeamMemberRole
+    typename__: Optional[Literal['TeamMemberEdge']] = Field(
+        'TeamMemberEdge', alias='__typename'
+    )
+
+
+class TeamRemoveMemberAuditEntry(
+    AuditEntry, Node, OrganizationAuditEntryData, TeamAuditEntryData
+):
+    """
+    Audit log entry for a team.remove_member event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isLdapMapped: Optional[Boolean] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamRemoveMemberAuditEntry']] = Field(
+        'TeamRemoveMemberAuditEntry', alias='__typename'
+    )
+
+
+class TeamRemoveRepositoryAuditEntry(
+    AuditEntry,
+    Node,
+    OrganizationAuditEntryData,
+    RepositoryAuditEntryData,
+    TeamAuditEntryData,
+):
+    """
+    Audit log entry for a team.remove_repository event.
+    """
+
+    action: String
+    actor: Optional[AuditEntryActor] = None
+    actorIp: Optional[String] = None
+    actorLocation: Optional[ActorLocation] = None
+    actorLogin: Optional[String] = None
+    actorResourcePath: Optional[URI] = None
+    actorUrl: Optional[URI] = None
+    createdAt: PreciseDateTime
+    id: ID
+    isLdapMapped: Optional[Boolean] = None
+    operationType: Optional[OperationType] = None
+    organization: Optional[Organization] = None
+    organizationName: Optional[String] = None
+    organizationResourcePath: Optional[URI] = None
+    organizationUrl: Optional[URI] = None
+    repository: Optional[Repository] = None
+    repositoryName: Optional[String] = None
+    repositoryResourcePath: Optional[URI] = None
+    repositoryUrl: Optional[URI] = None
+    team: Optional[Team] = None
+    teamName: Optional[String] = None
+    teamResourcePath: Optional[URI] = None
+    teamUrl: Optional[URI] = None
+    user: Optional[User] = None
+    userLogin: Optional[String] = None
+    userResourcePath: Optional[URI] = None
+    userUrl: Optional[URI] = None
+    typename__: Optional[Literal['TeamRemoveRepositoryAuditEntry']] = Field(
+        'TeamRemoveRepositoryAuditEntry', alias='__typename'
+    )
+
+
+class TeamRepositoryConnection(BaseModel):
+    """
+    The connection type for Repository.
+    """
+
+    edges: Optional[List[Optional[TeamRepositoryEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[Repository]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['TeamRepositoryConnection']] = Field(
+        'TeamRepositoryConnection', alias='__typename'
+    )
+
+
+class TeamRepositoryEdge(BaseModel):
+    """
+    Represents a team repository.
+    """
+
+    cursor: String
+    node: Repository
+    permission: RepositoryPermission
+    typename__: Optional[Literal['TeamRepositoryEdge']] = Field(
+        'TeamRepositoryEdge', alias='__typename'
+    )
+
+
+class TextMatch(BaseModel):
+    """
+    A text match within a search result.
+    """
+
+    fragment: String
+    highlights: List[TextMatchHighlight]
+    property: String
+    typename__: Optional[Literal['TextMatch']] = Field('TextMatch', alias='__typename')
+
+
+class TextMatchHighlight(BaseModel):
+    """
+    Represents a single highlight in a search result match.
+    """
+
+    beginIndice: Int
+    endIndice: Int
+    text: String
+    typename__: Optional[Literal['TextMatchHighlight']] = Field(
+        'TextMatchHighlight', alias='__typename'
+    )
+
+
+class Topic(Node, Starrable):
+    """
+    A topic aggregates entities that are related to a subject.
+    """
+
+    id: ID
+    name: String
+    relatedTopics: List[Topic]
+    repositories: RepositoryConnection
+    stargazerCount: Int
+    stargazers: StargazerConnection
+    viewerHasStarred: Boolean
+    typename__: Optional[Literal['Topic']] = Field('Topic', alias='__typename')
+
+
+class TransferEnterpriseOrganizationPayload(BaseModel):
+    """
+    Autogenerated return type of TransferEnterpriseOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[Literal['TransferEnterpriseOrganizationPayload']] = Field(
+        'TransferEnterpriseOrganizationPayload', alias='__typename'
+    )
+
+
+class TransferIssuePayload(BaseModel):
+    """
+    Autogenerated return type of TransferIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['TransferIssuePayload']] = Field(
+        'TransferIssuePayload', alias='__typename'
+    )
+
+
+class TransferredEvent(Node):
+    """
+    Represents a 'transferred' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    fromRepository: Optional[Repository] = None
+    id: ID
+    issue: Issue
+    typename__: Optional[Literal['TransferredEvent']] = Field(
+        'TransferredEvent', alias='__typename'
+    )
+
+
+class Tree(GitObject, Node):
+    """
+    Represents a Git tree.
+    """
+
+    abbreviatedOid: String
+    commitResourcePath: URI
+    commitUrl: URI
+    entries: Optional[List[TreeEntry]] = Field(default_factory=list)
+    id: ID
+    oid: GitObjectID
+    repository: Repository
+    typename__: Optional[Literal['Tree']] = Field('Tree', alias='__typename')
+
+
+class TreeEntry(BaseModel):
+    """
+    Represents a Git tree entry.
+    """
+
+    extension: Optional[String] = None
+    isGenerated: Boolean
+    language: Optional[Language] = None
+    lineCount: Optional[Int] = None
+    mode: Int
+    name: String
+    nameRaw: Base64String
+    object: Optional[GitObject] = None
+    oid: GitObjectID
+    path: Optional[String] = None
+    pathRaw: Optional[Base64String] = None
+    repository: Repository
+    size: Int
+    submodule: Optional[Submodule] = None
+    type: String
+    typename__: Optional[Literal['TreeEntry']] = Field('TreeEntry', alias='__typename')
+
+
+class UnarchiveProjectV2ItemPayload(BaseModel):
+    """
+    Autogenerated return type of UnarchiveProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    item: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['UnarchiveProjectV2ItemPayload']] = Field(
+        'UnarchiveProjectV2ItemPayload', alias='__typename'
+    )
+
+
+class UnarchiveRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of UnarchiveRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UnarchiveRepositoryPayload']] = Field(
+        'UnarchiveRepositoryPayload', alias='__typename'
+    )
+
+
+class UnassignedEvent(Node):
+    """
+    Represents an 'unassigned' event on any assignable object.
+    """
+
+    actor: Optional[Actor] = None
+    assignable: Assignable
+    assignee: Optional[Assignee] = None
+    createdAt: DateTime
+    id: ID
+    user: Optional[User] = None
+    typename__: Optional[Literal['UnassignedEvent']] = Field(
+        'UnassignedEvent', alias='__typename'
+    )
+
+
+class UnfollowOrganizationPayload(BaseModel):
+    """
+    Autogenerated return type of UnfollowOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[Literal['UnfollowOrganizationPayload']] = Field(
+        'UnfollowOrganizationPayload', alias='__typename'
+    )
+
+
+class UnfollowUserPayload(BaseModel):
+    """
+    Autogenerated return type of UnfollowUser
+    """
+
+    clientMutationId: Optional[String] = None
+    user: Optional[User] = None
+    typename__: Optional[Literal['UnfollowUserPayload']] = Field(
+        'UnfollowUserPayload', alias='__typename'
+    )
+
+
+class UnknownSignature(GitSignature):
+    """
+    Represents an unknown signature on a Commit or Tag.
+    """
+
+    email: String
+    isValid: Boolean
+    payload: String
+    signature: String
+    signer: Optional[User] = None
+    state: GitSignatureState
+    wasSignedByGitHub: Boolean
+    typename__: Optional[Literal['UnknownSignature']] = Field(
+        'UnknownSignature', alias='__typename'
+    )
+
+
+class UnlabeledEvent(Node):
+    """
+    Represents an 'unlabeled' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    label: Label
+    labelable: Labelable
+    typename__: Optional[Literal['UnlabeledEvent']] = Field(
+        'UnlabeledEvent', alias='__typename'
+    )
+
+
+class UnlinkProjectV2FromRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of UnlinkProjectV2FromRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UnlinkProjectV2FromRepositoryPayload']] = Field(
+        'UnlinkProjectV2FromRepositoryPayload', alias='__typename'
+    )
+
+
+class UnlinkProjectV2FromTeamPayload(BaseModel):
+    """
+    Autogenerated return type of UnlinkProjectV2FromTeam
+    """
+
+    clientMutationId: Optional[String] = None
+    team: Optional[Team] = None
+    typename__: Optional[Literal['UnlinkProjectV2FromTeamPayload']] = Field(
+        'UnlinkProjectV2FromTeamPayload', alias='__typename'
+    )
+
+
+class UnlinkRepositoryFromProjectPayload(BaseModel):
+    """
+    Autogenerated return type of UnlinkRepositoryFromProject
+    """
+
+    clientMutationId: Optional[String] = None
+    project: Optional[Project] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UnlinkRepositoryFromProjectPayload']] = Field(
+        'UnlinkRepositoryFromProjectPayload', alias='__typename'
+    )
+
+
+class UnlockLockablePayload(BaseModel):
+    """
+    Autogenerated return type of UnlockLockable
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    unlockedRecord: Optional[Lockable] = None
+    typename__: Optional[Literal['UnlockLockablePayload']] = Field(
+        'UnlockLockablePayload', alias='__typename'
+    )
+
+
+class UnlockedEvent(Node):
+    """
+    Represents an 'unlocked' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    lockable: Lockable
+    typename__: Optional[Literal['UnlockedEvent']] = Field(
+        'UnlockedEvent', alias='__typename'
+    )
+
+
+class UnmarkDiscussionCommentAsAnswerPayload(BaseModel):
+    """
+    Autogenerated return type of UnmarkDiscussionCommentAsAnswer
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['UnmarkDiscussionCommentAsAnswerPayload']] = Field(
+        'UnmarkDiscussionCommentAsAnswerPayload', alias='__typename'
+    )
+
+
+class UnmarkFileAsViewedPayload(BaseModel):
+    """
+    Autogenerated return type of UnmarkFileAsViewed
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['UnmarkFileAsViewedPayload']] = Field(
+        'UnmarkFileAsViewedPayload', alias='__typename'
+    )
+
+
+class UnmarkIssueAsDuplicatePayload(BaseModel):
+    """
+    Autogenerated return type of UnmarkIssueAsDuplicate
+    """
+
+    clientMutationId: Optional[String] = None
+    duplicate: Optional[IssueOrPullRequest] = None
+    typename__: Optional[Literal['UnmarkIssueAsDuplicatePayload']] = Field(
+        'UnmarkIssueAsDuplicatePayload', alias='__typename'
+    )
+
+
+class UnmarkProjectV2AsTemplatePayload(BaseModel):
+    """
+    Autogenerated return type of UnmarkProjectV2AsTemplate
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['UnmarkProjectV2AsTemplatePayload']] = Field(
+        'UnmarkProjectV2AsTemplatePayload', alias='__typename'
+    )
+
+
+class UnmarkedAsDuplicateEvent(Node):
+    """
+    Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    canonical: Optional[IssueOrPullRequest] = None
+    createdAt: DateTime
+    duplicate: Optional[IssueOrPullRequest] = None
+    id: ID
+    isCrossRepository: Boolean
+    typename__: Optional[Literal['UnmarkedAsDuplicateEvent']] = Field(
+        'UnmarkedAsDuplicateEvent', alias='__typename'
+    )
+
+
+class UnminimizeCommentPayload(BaseModel):
+    """
+    Autogenerated return type of UnminimizeComment
+    """
+
+    clientMutationId: Optional[String] = None
+    unminimizedComment: Optional[Minimizable] = None
+    typename__: Optional[Literal['UnminimizeCommentPayload']] = Field(
+        'UnminimizeCommentPayload', alias='__typename'
+    )
+
+
+class UnpinIssuePayload(BaseModel):
+    """
+    Autogenerated return type of UnpinIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['UnpinIssuePayload']] = Field(
+        'UnpinIssuePayload', alias='__typename'
+    )
+
+
+class UnpinnedEvent(Node):
+    """
+    Represents an 'unpinned' event on a given issue or pull request.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    issue: Issue
+    typename__: Optional[Literal['UnpinnedEvent']] = Field(
+        'UnpinnedEvent', alias='__typename'
+    )
+
+
+class UnresolveReviewThreadPayload(BaseModel):
+    """
+    Autogenerated return type of UnresolveReviewThread
+    """
+
+    clientMutationId: Optional[String] = None
+    thread: Optional[PullRequestReviewThread] = None
+    typename__: Optional[Literal['UnresolveReviewThreadPayload']] = Field(
+        'UnresolveReviewThreadPayload', alias='__typename'
+    )
+
+
+class UnsubscribedEvent(Node):
+    """
+    Represents an 'unsubscribed' event on a given `Subscribable`.
+    """
+
+    actor: Optional[Actor] = None
+    createdAt: DateTime
+    id: ID
+    subscribable: Subscribable
+    typename__: Optional[Literal['UnsubscribedEvent']] = Field(
+        'UnsubscribedEvent', alias='__typename'
+    )
+
+
+class UpdateBranchProtectionRulePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateBranchProtectionRule
+    """
+
+    branchProtectionRule: Optional[BranchProtectionRule] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['UpdateBranchProtectionRulePayload']] = Field(
+        'UpdateBranchProtectionRulePayload', alias='__typename'
+    )
+
+
+class UpdateCheckRunPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateCheckRun
+    """
+
+    checkRun: Optional[CheckRun] = None
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['UpdateCheckRunPayload']] = Field(
+        'UpdateCheckRunPayload', alias='__typename'
+    )
+
+
+class UpdateCheckSuitePreferencesPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateCheckSuitePreferences
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UpdateCheckSuitePreferencesPayload']] = Field(
+        'UpdateCheckSuitePreferencesPayload', alias='__typename'
+    )
+
+
+class UpdateDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[DiscussionComment] = None
+    typename__: Optional[Literal['UpdateDiscussionCommentPayload']] = Field(
+        'UpdateDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class UpdateDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussion: Optional[Discussion] = None
+    typename__: Optional[Literal['UpdateDiscussionPayload']] = Field(
+        'UpdateDiscussionPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseAdministratorRolePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseAdministratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    typename__: Optional[Literal['UpdateEnterpriseAdministratorRolePayload']] = Field(
+        'UpdateEnterpriseAdministratorRolePayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseDefaultRepositoryPermissionSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseDefaultRepositoryPermissionSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseDefaultRepositoryPermissionSettingPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanDeleteIssuesSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanDeleteIssuesSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanDeleteIssuesSettingPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseMembersCanMakePurchasesSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanMakePurchasesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanMakePurchasesSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanMakePurchasesSettingPayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseOrganizationProjectsSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseOrganizationProjectsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseOrganizationProjectsSettingPayload']
+    ] = Field('UpdateEnterpriseOrganizationProjectsSettingPayload', alias='__typename')
+
+
+class UpdateEnterpriseOwnerOrganizationRolePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseOwnerOrganizationRole
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseOwnerOrganizationRolePayload']
+    ] = Field('UpdateEnterpriseOwnerOrganizationRolePayload', alias='__typename')
+
+
+class UpdateEnterpriseProfilePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseProfile
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    typename__: Optional[Literal['UpdateEnterpriseProfilePayload']] = Field(
+        'UpdateEnterpriseProfilePayload', alias='__typename'
+    )
+
+
+class UpdateEnterpriseRepositoryProjectsSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseRepositoryProjectsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseRepositoryProjectsSettingPayload']
+    ] = Field('UpdateEnterpriseRepositoryProjectsSettingPayload', alias='__typename')
+
+
+class UpdateEnterpriseTeamDiscussionsSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseTeamDiscussionsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseTeamDiscussionsSettingPayload']
+    ] = Field('UpdateEnterpriseTeamDiscussionsSettingPayload', alias='__typename')
+
+
+class UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterprise: Optional[Enterprise] = None
+    message: Optional[String] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload']
+    ] = Field(
+        'UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateEnvironmentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    environment: Optional[Environment] = None
+    typename__: Optional[Literal['UpdateEnvironmentPayload']] = Field(
+        'UpdateEnvironmentPayload', alias='__typename'
+    )
+
+
+class UpdateIpAllowListEnabledSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateIpAllowListEnabledSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[IpAllowListOwner] = None
+    typename__: Optional[Literal['UpdateIpAllowListEnabledSettingPayload']] = Field(
+        'UpdateIpAllowListEnabledSettingPayload', alias='__typename'
+    )
+
+
+class UpdateIpAllowListEntryPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateIpAllowListEntry
+    """
+
+    clientMutationId: Optional[String] = None
+    ipAllowListEntry: Optional[IpAllowListEntry] = None
+    typename__: Optional[Literal['UpdateIpAllowListEntryPayload']] = Field(
+        'UpdateIpAllowListEntryPayload', alias='__typename'
+    )
+
+
+class UpdateIpAllowListForInstalledAppsEnabledSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateIpAllowListForInstalledAppsEnabledSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[IpAllowListOwner] = None
+    typename__: Optional[
+        Literal['UpdateIpAllowListForInstalledAppsEnabledSettingPayload']
+    ] = Field(
+        'UpdateIpAllowListForInstalledAppsEnabledSettingPayload', alias='__typename'
+    )
+
+
+class UpdateIssueCommentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateIssueComment
+    """
+
+    clientMutationId: Optional[String] = None
+    issueComment: Optional[IssueComment] = None
+    typename__: Optional[Literal['UpdateIssueCommentPayload']] = Field(
+        'UpdateIssueCommentPayload', alias='__typename'
+    )
+
+
+class UpdateIssuePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateIssue
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    issue: Optional[Issue] = None
+    typename__: Optional[Literal['UpdateIssuePayload']] = Field(
+        'UpdateIssuePayload', alias='__typename'
+    )
+
+
+class UpdateLabelPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    label: Optional[Label] = None
+    typename__: Optional[Literal['UpdateLabelPayload']] = Field(
+        'UpdateLabelPayload', alias='__typename'
+    )
+
+
+class UpdateNotificationRestrictionSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateNotificationRestrictionSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    owner: Optional[VerifiableDomainOwner] = None
+    typename__: Optional[
+        Literal['UpdateNotificationRestrictionSettingPayload']
+    ] = Field('UpdateNotificationRestrictionSettingPayload', alias='__typename')
+
+
+class UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[
+        Literal['UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload']
+    ] = Field(
+        'UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload',
+        alias='__typename',
+    )
+
+
+class UpdateOrganizationWebCommitSignoffSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateOrganizationWebCommitSignoffSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    organization: Optional[Organization] = None
+    typename__: Optional[
+        Literal['UpdateOrganizationWebCommitSignoffSettingPayload']
+    ] = Field('UpdateOrganizationWebCommitSignoffSettingPayload', alias='__typename')
+
+
+class UpdateParameters(BaseModel):
+    """
+    Only allow users with bypass permission to update matching refs.
+    """
+
+    updateAllowsFetchAndMerge: Boolean
+    typename__: Optional[Literal['UpdateParameters']] = Field(
+        'UpdateParameters', alias='__typename'
+    )
+
+
+class UpdatePatreonSponsorabilityPayload(BaseModel):
+    """
+    Autogenerated return type of UpdatePatreonSponsorability
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorsListing: Optional[SponsorsListing] = None
+    typename__: Optional[Literal['UpdatePatreonSponsorabilityPayload']] = Field(
+        'UpdatePatreonSponsorabilityPayload', alias='__typename'
+    )
+
+
+class UpdateProjectCardPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectCard
+    """
+
+    clientMutationId: Optional[String] = None
+    projectCard: Optional[ProjectCard] = None
+    typename__: Optional[Literal['UpdateProjectCardPayload']] = Field(
+        'UpdateProjectCardPayload', alias='__typename'
+    )
+
+
+class UpdateProjectColumnPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    projectColumn: Optional[ProjectColumn] = None
+    typename__: Optional[Literal['UpdateProjectColumnPayload']] = Field(
+        'UpdateProjectColumnPayload', alias='__typename'
+    )
+
+
+class UpdateProjectPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProject
+    """
+
+    clientMutationId: Optional[String] = None
+    project: Optional[Project] = None
+    typename__: Optional[Literal['UpdateProjectPayload']] = Field(
+        'UpdateProjectPayload', alias='__typename'
+    )
+
+
+class UpdateProjectV2CollaboratorsPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectV2Collaborators
+    """
+
+    clientMutationId: Optional[String] = None
+    collaborators: Optional[ProjectV2ActorConnection] = None
+    typename__: Optional[Literal['UpdateProjectV2CollaboratorsPayload']] = Field(
+        'UpdateProjectV2CollaboratorsPayload', alias='__typename'
+    )
+
+
+class UpdateProjectV2DraftIssuePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectV2DraftIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    draftIssue: Optional[DraftIssue] = None
+    typename__: Optional[Literal['UpdateProjectV2DraftIssuePayload']] = Field(
+        'UpdateProjectV2DraftIssuePayload', alias='__typename'
+    )
+
+
+class UpdateProjectV2ItemFieldValuePayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectV2ItemFieldValue
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2Item: Optional[ProjectV2Item] = None
+    typename__: Optional[Literal['UpdateProjectV2ItemFieldValuePayload']] = Field(
+        'UpdateProjectV2ItemFieldValuePayload', alias='__typename'
+    )
+
+
+class UpdateProjectV2ItemPositionPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectV2ItemPosition
+    """
+
+    clientMutationId: Optional[String] = None
+    items: Optional[ProjectV2ItemConnection] = None
+    typename__: Optional[Literal['UpdateProjectV2ItemPositionPayload']] = Field(
+        'UpdateProjectV2ItemPositionPayload', alias='__typename'
+    )
+
+
+class UpdateProjectV2Payload(BaseModel):
+    """
+    Autogenerated return type of UpdateProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    projectV2: Optional[ProjectV2] = None
+    typename__: Optional[Literal['UpdateProjectV2Payload']] = Field(
+        'UpdateProjectV2Payload', alias='__typename'
+    )
+
+
+class UpdatePullRequestBranchPayload(BaseModel):
+    """
+    Autogenerated return type of UpdatePullRequestBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['UpdatePullRequestBranchPayload']] = Field(
+        'UpdatePullRequestBranchPayload', alias='__typename'
+    )
+
+
+class UpdatePullRequestPayload(BaseModel):
+    """
+    Autogenerated return type of UpdatePullRequest
+    """
+
+    actor: Optional[Actor] = None
+    clientMutationId: Optional[String] = None
+    pullRequest: Optional[PullRequest] = None
+    typename__: Optional[Literal['UpdatePullRequestPayload']] = Field(
+        'UpdatePullRequestPayload', alias='__typename'
+    )
+
+
+class UpdatePullRequestReviewCommentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdatePullRequestReviewComment
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReviewComment: Optional[PullRequestReviewComment] = None
+    typename__: Optional[Literal['UpdatePullRequestReviewCommentPayload']] = Field(
+        'UpdatePullRequestReviewCommentPayload', alias='__typename'
+    )
+
+
+class UpdatePullRequestReviewPayload(BaseModel):
+    """
+    Autogenerated return type of UpdatePullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReview: Optional[PullRequestReview] = None
+    typename__: Optional[Literal['UpdatePullRequestReviewPayload']] = Field(
+        'UpdatePullRequestReviewPayload', alias='__typename'
+    )
+
+
+class UpdateRefPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateRef
+    """
+
+    clientMutationId: Optional[String] = None
+    ref: Optional[Ref] = None
+    typename__: Optional[Literal['UpdateRefPayload']] = Field(
+        'UpdateRefPayload', alias='__typename'
+    )
+
+
+class UpdateRefsPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateRefs
+    """
+
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['UpdateRefsPayload']] = Field(
+        'UpdateRefsPayload', alias='__typename'
+    )
+
+
+class UpdateRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UpdateRepositoryPayload']] = Field(
+        'UpdateRepositoryPayload', alias='__typename'
+    )
+
+
+class UpdateRepositoryRulesetPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateRepositoryRuleset
+    """
+
+    clientMutationId: Optional[String] = None
+    ruleset: Optional[RepositoryRuleset] = None
+    typename__: Optional[Literal['UpdateRepositoryRulesetPayload']] = Field(
+        'UpdateRepositoryRulesetPayload', alias='__typename'
+    )
+
+
+class UpdateRepositoryWebCommitSignoffSettingPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateRepositoryWebCommitSignoffSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    message: Optional[String] = None
+    repository: Optional[Repository] = None
+    typename__: Optional[
+        Literal['UpdateRepositoryWebCommitSignoffSettingPayload']
+    ] = Field('UpdateRepositoryWebCommitSignoffSettingPayload', alias='__typename')
+
+
+class UpdateSponsorshipPreferencesPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateSponsorshipPreferences
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorship: Optional[Sponsorship] = None
+    typename__: Optional[Literal['UpdateSponsorshipPreferencesPayload']] = Field(
+        'UpdateSponsorshipPreferencesPayload', alias='__typename'
+    )
+
+
+class UpdateSubscriptionPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateSubscription
+    """
+
+    clientMutationId: Optional[String] = None
+    subscribable: Optional[Subscribable] = None
+    typename__: Optional[Literal['UpdateSubscriptionPayload']] = Field(
+        'UpdateSubscriptionPayload', alias='__typename'
+    )
+
+
+class UpdateTeamDiscussionCommentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateTeamDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    teamDiscussionComment: Optional[TeamDiscussionComment] = None
+    typename__: Optional[Literal['UpdateTeamDiscussionCommentPayload']] = Field(
+        'UpdateTeamDiscussionCommentPayload', alias='__typename'
+    )
+
+
+class UpdateTeamDiscussionPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateTeamDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    teamDiscussion: Optional[TeamDiscussion] = None
+    typename__: Optional[Literal['UpdateTeamDiscussionPayload']] = Field(
+        'UpdateTeamDiscussionPayload', alias='__typename'
+    )
+
+
+class UpdateTeamReviewAssignmentPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateTeamReviewAssignment
+    """
+
+    clientMutationId: Optional[String] = None
+    team: Optional[Team] = None
+    typename__: Optional[Literal['UpdateTeamReviewAssignmentPayload']] = Field(
+        'UpdateTeamReviewAssignmentPayload', alias='__typename'
+    )
+
+
+class UpdateTeamsRepositoryPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateTeamsRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repository: Optional[Repository] = None
+    teams: Optional[List[Team]] = Field(default_factory=list)
+    typename__: Optional[Literal['UpdateTeamsRepositoryPayload']] = Field(
+        'UpdateTeamsRepositoryPayload', alias='__typename'
+    )
+
+
+class UpdateTopicsPayload(BaseModel):
+    """
+    Autogenerated return type of UpdateTopics
+    """
+
+    clientMutationId: Optional[String] = None
+    invalidTopicNames: Optional[List[String]] = Field(default_factory=list)
+    repository: Optional[Repository] = None
+    typename__: Optional[Literal['UpdateTopicsPayload']] = Field(
+        'UpdateTopicsPayload', alias='__typename'
+    )
+
+
+class User(
+    Actor,
+    Node,
+    PackageOwner,
+    ProfileOwner,
+    ProjectOwner,
+    ProjectV2Owner,
+    ProjectV2Recent,
+    RepositoryDiscussionAuthor,
+    RepositoryDiscussionCommentAuthor,
+    RepositoryOwner,
+    Sponsorable,
+    UniformResourceLocatable,
+):
+    """
+    A user is an individual's account on GitHub that owns repositories and can make new content.
+    """
+
+    anyPinnableItems: Boolean
+    avatarUrl: URI
+    bio: Optional[String] = None
+    bioHTML: HTML
+    canReceiveOrganizationEmailsWhenNotificationsRestricted: Boolean
+    commitComments: CommitCommentConnection
+    company: Optional[String] = None
+    companyHTML: HTML
+    contributionsCollection: ContributionsCollection
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    email: String
+    enterprises: Optional[EnterpriseConnection] = None
+    estimatedNextSponsorsPayoutInCents: Int
+    followers: FollowerConnection
+    following: FollowingConnection
+    gist: Optional[Gist] = None
+    gistComments: GistCommentConnection
+    gists: GistConnection
+    hasSponsorsListing: Boolean
+    hovercard: Hovercard
+    id: ID
+    interactionAbility: Optional[RepositoryInteractionAbility] = None
+    isBountyHunter: Boolean
+    isCampusExpert: Boolean
+    isDeveloperProgramMember: Boolean
+    isEmployee: Boolean
+    isFollowingViewer: Boolean
+    isGitHubStar: Boolean
+    isHireable: Boolean
+    isSiteAdmin: Boolean
+    isSponsoredBy: Boolean
+    isSponsoringViewer: Boolean
+    isViewer: Boolean
+    issueComments: IssueCommentConnection
+    issues: IssueConnection
+    itemShowcase: ProfileItemShowcase
+    location: Optional[String] = None
+    login: String
+    monthlyEstimatedSponsorsIncomeInCents: Int
+    name: Optional[String] = None
+    organization: Optional[Organization] = None
+    organizationVerifiedDomainEmails: List[String]
+    organizations: OrganizationConnection
+    packages: PackageConnection
+    pinnableItems: PinnableItemConnection
+    pinnedItems: PinnableItemConnection
+    pinnedItemsRemaining: Int
+    project: Optional[Project] = None
+    projectV2: Optional[ProjectV2] = None
+    projects: ProjectConnection
+    projectsResourcePath: URI
+    projectsUrl: URI
+    projectsV2: ProjectV2Connection
+    pronouns: Optional[String] = None
+    publicKeys: PublicKeyConnection
+    pullRequests: PullRequestConnection
+    recentProjects: ProjectV2Connection
+    repositories: RepositoryConnection
+    repositoriesContributedTo: RepositoryConnection
+    repository: Optional[Repository] = None
+    repositoryDiscussionComments: DiscussionCommentConnection
+    repositoryDiscussions: DiscussionConnection
+    resourcePath: URI
+    savedReplies: Optional[SavedReplyConnection] = None
+    socialAccounts: SocialAccountConnection
+    sponsoring: SponsorConnection
+    sponsors: SponsorConnection
+    sponsorsActivities: SponsorsActivityConnection
+    sponsorsListing: Optional[SponsorsListing] = None
+    sponsorshipForViewerAsSponsor: Optional[Sponsorship] = None
+    sponsorshipForViewerAsSponsorable: Optional[Sponsorship] = None
+    sponsorshipNewsletters: SponsorshipNewsletterConnection
+    sponsorshipsAsMaintainer: SponsorshipConnection
+    sponsorshipsAsSponsor: SponsorshipConnection
+    starredRepositories: StarredRepositoryConnection
+    status: Optional[UserStatus] = None
+    topRepositories: RepositoryConnection
+    totalSponsorshipAmountAsSponsorInCents: Optional[Int] = None
+    twitterUsername: Optional[String] = None
+    updatedAt: DateTime
+    url: URI
+    viewerCanChangePinnedItems: Boolean
+    viewerCanCreateProjects: Boolean
+    viewerCanFollow: Boolean
+    viewerCanSponsor: Boolean
+    viewerIsFollowing: Boolean
+    viewerIsSponsoring: Boolean
+    watching: RepositoryConnection
+    websiteUrl: Optional[URI] = None
+    typename__: Optional[Literal['User']] = Field('User', alias='__typename')
+
+
+class UserBlockedEvent(Node):
+    """
+    Represents a 'user_blocked' event on a given user.
+    """
+
+    actor: Optional[Actor] = None
+    blockDuration: UserBlockDuration
+    createdAt: DateTime
+    id: ID
+    subject: Optional[User] = None
+    typename__: Optional[Literal['UserBlockedEvent']] = Field(
+        'UserBlockedEvent', alias='__typename'
+    )
+
+
+class UserConnection(BaseModel):
+    """
+    The connection type for User.
+    """
+
+    edges: Optional[List[Optional[UserEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[User]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['UserConnection']] = Field(
+        'UserConnection', alias='__typename'
+    )
+
+
+class UserContentEdit(Node):
+    """
+    An edit on user content
+    """
+
+    createdAt: DateTime
+    deletedAt: Optional[DateTime] = None
+    deletedBy: Optional[Actor] = None
+    diff: Optional[String] = None
+    editedAt: DateTime
+    editor: Optional[Actor] = None
+    id: ID
+    updatedAt: DateTime
+    typename__: Optional[Literal['UserContentEdit']] = Field(
+        'UserContentEdit', alias='__typename'
+    )
+
+
+class UserContentEditConnection(BaseModel):
+    """
+    A list of edits to content.
+    """
+
+    edges: Optional[List[Optional[UserContentEditEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[UserContentEdit]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['UserContentEditConnection']] = Field(
+        'UserContentEditConnection', alias='__typename'
+    )
+
+
+class UserContentEditEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[UserContentEdit] = None
+    typename__: Optional[Literal['UserContentEditEdge']] = Field(
+        'UserContentEditEdge', alias='__typename'
+    )
+
+
+class UserEdge(BaseModel):
+    """
+    Represents a user.
+    """
+
+    cursor: String
+    node: Optional[User] = None
+    typename__: Optional[Literal['UserEdge']] = Field('UserEdge', alias='__typename')
+
+
+class UserEmailMetadata(BaseModel):
+    """
+    Email attributes from External Identity
+    """
+
+    primary: Optional[Boolean] = None
+    type: Optional[String] = None
+    value: String
+    typename__: Optional[Literal['UserEmailMetadata']] = Field(
+        'UserEmailMetadata', alias='__typename'
+    )
+
+
+class UserStatus(Node):
+    """
+    The user's description of what they're currently doing.
+    """
+
+    createdAt: DateTime
+    emoji: Optional[String] = None
+    emojiHTML: Optional[HTML] = None
+    expiresAt: Optional[DateTime] = None
+    id: ID
+    indicatesLimitedAvailability: Boolean
+    message: Optional[String] = None
+    organization: Optional[Organization] = None
+    updatedAt: DateTime
+    user: User
+    typename__: Optional[Literal['UserStatus']] = Field(
+        'UserStatus', alias='__typename'
+    )
+
+
+class UserStatusConnection(BaseModel):
+    """
+    The connection type for UserStatus.
+    """
+
+    edges: Optional[List[Optional[UserStatusEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[UserStatus]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['UserStatusConnection']] = Field(
+        'UserStatusConnection', alias='__typename'
+    )
+
+
+class UserStatusEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[UserStatus] = None
+    typename__: Optional[Literal['UserStatusEdge']] = Field(
+        'UserStatusEdge', alias='__typename'
+    )
+
+
+class VerifiableDomain(Node):
+    """
+    A domain that can be verified or approved for an organization or an enterprise.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    dnsHostName: Optional[URI] = None
+    domain: URI
+    hasFoundHostName: Boolean
+    hasFoundVerificationToken: Boolean
+    id: ID
+    isApproved: Boolean
+    isRequiredForPolicyEnforcement: Boolean
+    isVerified: Boolean
+    owner: VerifiableDomainOwner
+    punycodeEncodedDomain: URI
+    tokenExpirationTime: Optional[DateTime] = None
+    updatedAt: DateTime
+    verificationToken: Optional[String] = None
+    typename__: Optional[Literal['VerifiableDomain']] = Field(
+        'VerifiableDomain', alias='__typename'
+    )
+
+
+class VerifiableDomainConnection(BaseModel):
+    """
+    The connection type for VerifiableDomain.
+    """
+
+    edges: Optional[List[Optional[VerifiableDomainEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[VerifiableDomain]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['VerifiableDomainConnection']] = Field(
+        'VerifiableDomainConnection', alias='__typename'
+    )
+
+
+class VerifiableDomainEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[VerifiableDomain] = None
+    typename__: Optional[Literal['VerifiableDomainEdge']] = Field(
+        'VerifiableDomainEdge', alias='__typename'
+    )
+
+
+class VerifyVerifiableDomainPayload(BaseModel):
+    """
+    Autogenerated return type of VerifyVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    domain: Optional[VerifiableDomain] = None
+    typename__: Optional[Literal['VerifyVerifiableDomainPayload']] = Field(
+        'VerifyVerifiableDomainPayload', alias='__typename'
+    )
+
+
+class ViewerHovercardContext(HovercardContext):
+    """
+    A hovercard context with a message describing how the viewer is related.
+    """
+
+    message: String
+    octicon: String
+    viewer: User
+    typename__: Optional[Literal['ViewerHovercardContext']] = Field(
+        'ViewerHovercardContext', alias='__typename'
+    )
+
+
+class Workflow(Node, UniformResourceLocatable):
+    """
+    A workflow contains meta information about an Actions workflow file.
+    """
+
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    id: ID
+    name: String
+    resourcePath: URI
+    runs: WorkflowRunConnection
+    state: WorkflowState
+    updatedAt: DateTime
+    url: URI
+    typename__: Optional[Literal['Workflow']] = Field('Workflow', alias='__typename')
+
+
+class WorkflowFileReference(BaseModel):
+    """
+    A workflow that must run for this rule to pass
+    """
+
+    path: String
+    ref: Optional[String] = None
+    repositoryId: Int
+    sha: Optional[String] = None
+    typename__: Optional[Literal['WorkflowFileReference']] = Field(
+        'WorkflowFileReference', alias='__typename'
+    )
+
+
+class WorkflowRun(Node, UniformResourceLocatable):
+    """
+    A workflow run.
+    """
+
+    checkSuite: CheckSuite
+    createdAt: DateTime
+    databaseId: Optional[Int] = None
+    deploymentReviews: DeploymentReviewConnection
+    event: String
+    file: Optional[WorkflowRunFile] = None
+    id: ID
+    pendingDeploymentRequests: DeploymentRequestConnection
+    resourcePath: URI
+    runNumber: Int
+    updatedAt: DateTime
+    url: URI
+    workflow: Workflow
+    typename__: Optional[Literal['WorkflowRun']] = Field(
+        'WorkflowRun', alias='__typename'
+    )
+
+
+class WorkflowRunConnection(BaseModel):
+    """
+    The connection type for WorkflowRun.
+    """
+
+    edges: Optional[List[Optional[WorkflowRunEdge]]] = Field(default_factory=list)
+    nodes: Optional[List[Optional[WorkflowRun]]] = Field(default_factory=list)
+    pageInfo: PageInfo
+    totalCount: Int
+    typename__: Optional[Literal['WorkflowRunConnection']] = Field(
+        'WorkflowRunConnection', alias='__typename'
+    )
+
+
+class WorkflowRunEdge(BaseModel):
+    """
+    An edge in a connection.
+    """
+
+    cursor: String
+    node: Optional[WorkflowRun] = None
+    typename__: Optional[Literal['WorkflowRunEdge']] = Field(
+        'WorkflowRunEdge', alias='__typename'
+    )
+
+
+class WorkflowRunFile(Node, UniformResourceLocatable):
+    """
+    An executed workflow file for a workflow run.
+    """
+
+    id: ID
+    path: String
+    repositoryFileUrl: URI
+    repositoryName: URI
+    resourcePath: URI
+    run: WorkflowRun
+    url: URI
+    viewerCanPushRepository: Boolean
+    viewerCanReadRepository: Boolean
+    typename__: Optional[Literal['WorkflowRunFile']] = Field(
+        'WorkflowRunFile', alias='__typename'
+    )
+
+
+class WorkflowsParameters(BaseModel):
+    """
+    Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+    """
+
+    workflows: List[WorkflowFileReference]
+    typename__: Optional[Literal['WorkflowsParameters']] = Field(
+        'WorkflowsParameters', alias='__typename'
+    )
+
+
+class AbortQueuedMigrationsInput(BaseModel):
+    """
+    Autogenerated input type of AbortQueuedMigrations
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    typename__: Optional[Literal['AbortQueuedMigrationsInput']] = Field(
+        'AbortQueuedMigrationsInput', alias='__typename'
+    )
+
+
+class AbortRepositoryMigrationInput(BaseModel):
+    """
+    Autogenerated input type of AbortRepositoryMigration
+    """
+
+    clientMutationId: Optional[String] = None
+    migrationId: ID
+    typename__: Optional[Literal['AbortRepositoryMigrationInput']] = Field(
+        'AbortRepositoryMigrationInput', alias='__typename'
+    )
+
+
+class AcceptEnterpriseAdministratorInvitationInput(BaseModel):
+    """
+    Autogenerated input type of AcceptEnterpriseAdministratorInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    invitationId: ID
+    typename__: Optional[
+        Literal['AcceptEnterpriseAdministratorInvitationInput']
+    ] = Field('AcceptEnterpriseAdministratorInvitationInput', alias='__typename')
+
+
+class AcceptTopicSuggestionInput(BaseModel):
+    """
+    Autogenerated input type of AcceptTopicSuggestion
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    repositoryId: ID
+    typename__: Optional[Literal['AcceptTopicSuggestionInput']] = Field(
+        'AcceptTopicSuggestionInput', alias='__typename'
+    )
+
+
+class AddAssigneesToAssignableInput(BaseModel):
+    """
+    Autogenerated input type of AddAssigneesToAssignable
+    """
+
+    assignableId: ID
+    assigneeIds: List[ID]
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['AddAssigneesToAssignableInput']] = Field(
+        'AddAssigneesToAssignableInput', alias='__typename'
+    )
+
+
+class AddCommentInput(BaseModel):
+    """
+    Autogenerated input type of AddComment
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    subjectId: ID
+    typename__: Optional[Literal['AddCommentInput']] = Field(
+        'AddCommentInput', alias='__typename'
+    )
+
+
+class AddDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of AddDiscussionComment
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    discussionId: ID
+    replyToId: Optional[ID] = None
+    typename__: Optional[Literal['AddDiscussionCommentInput']] = Field(
+        'AddDiscussionCommentInput', alias='__typename'
+    )
+
+
+class AddDiscussionPollVoteInput(BaseModel):
+    """
+    Autogenerated input type of AddDiscussionPollVote
+    """
+
+    clientMutationId: Optional[String] = None
+    pollOptionId: ID
+    typename__: Optional[Literal['AddDiscussionPollVoteInput']] = Field(
+        'AddDiscussionPollVoteInput', alias='__typename'
+    )
+
+
+class AddEnterpriseOrganizationMemberInput(BaseModel):
+    """
+    Autogenerated input type of AddEnterpriseOrganizationMember
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    organizationId: ID
+    role: Optional[OrganizationMemberRole] = None
+    userIds: List[ID]
+    typename__: Optional[Literal['AddEnterpriseOrganizationMemberInput']] = Field(
+        'AddEnterpriseOrganizationMemberInput', alias='__typename'
+    )
+
+
+class AddEnterpriseSupportEntitlementInput(BaseModel):
+    """
+    Autogenerated input type of AddEnterpriseSupportEntitlement
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    typename__: Optional[Literal['AddEnterpriseSupportEntitlementInput']] = Field(
+        'AddEnterpriseSupportEntitlementInput', alias='__typename'
+    )
+
+
+class AddLabelsToLabelableInput(BaseModel):
+    """
+    Autogenerated input type of AddLabelsToLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelIds: List[ID]
+    labelableId: ID
+    typename__: Optional[Literal['AddLabelsToLabelableInput']] = Field(
+        'AddLabelsToLabelableInput', alias='__typename'
+    )
+
+
+class AddProjectCardInput(BaseModel):
+    """
+    Autogenerated input type of AddProjectCard
+    """
+
+    clientMutationId: Optional[String] = None
+    contentId: Optional[ID] = None
+    note: Optional[String] = None
+    projectColumnId: ID
+    typename__: Optional[Literal['AddProjectCardInput']] = Field(
+        'AddProjectCardInput', alias='__typename'
+    )
+
+
+class AddProjectColumnInput(BaseModel):
+    """
+    Autogenerated input type of AddProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    projectId: ID
+    typename__: Optional[Literal['AddProjectColumnInput']] = Field(
+        'AddProjectColumnInput', alias='__typename'
+    )
+
+
+class AddProjectV2DraftIssueInput(BaseModel):
+    """
+    Autogenerated input type of AddProjectV2DraftIssue
+    """
+
+    assigneeIds: Optional[List[ID]] = None
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    title: String
+    typename__: Optional[Literal['AddProjectV2DraftIssueInput']] = Field(
+        'AddProjectV2DraftIssueInput', alias='__typename'
+    )
+
+
+class AddProjectV2ItemByIdInput(BaseModel):
+    """
+    Autogenerated input type of AddProjectV2ItemById
+    """
+
+    clientMutationId: Optional[String] = None
+    contentId: ID
+    projectId: ID
+    typename__: Optional[Literal['AddProjectV2ItemByIdInput']] = Field(
+        'AddProjectV2ItemByIdInput', alias='__typename'
+    )
+
+
+class AddPullRequestReviewCommentInput(BaseModel):
+    """
+    Autogenerated input type of AddPullRequestReviewComment
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    commitOID: Optional[GitObjectID] = None
+    inReplyTo: Optional[ID] = None
+    path: Optional[String] = None
+    position: Optional[Int] = None
+    pullRequestId: Optional[ID] = None
+    pullRequestReviewId: Optional[ID] = None
+    typename__: Optional[Literal['AddPullRequestReviewCommentInput']] = Field(
+        'AddPullRequestReviewCommentInput', alias='__typename'
+    )
+
+
+class AddPullRequestReviewInput(BaseModel):
+    """
+    Autogenerated input type of AddPullRequestReview
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    comments: Optional[List[Optional[DraftPullRequestReviewComment]]] = None
+    commitOID: Optional[GitObjectID] = None
+    event: Optional[PullRequestReviewEvent] = None
+    pullRequestId: ID
+    threads: Optional[List[Optional[DraftPullRequestReviewThread]]] = None
+    typename__: Optional[Literal['AddPullRequestReviewInput']] = Field(
+        'AddPullRequestReviewInput', alias='__typename'
+    )
+
+
+class AddPullRequestReviewThreadInput(BaseModel):
+    """
+    Autogenerated input type of AddPullRequestReviewThread
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    line: Optional[Int] = None
+    path: String
+    pullRequestId: Optional[ID] = None
+    pullRequestReviewId: Optional[ID] = None
+    side: Optional[DiffSide] = 'RIGHT'
+    startLine: Optional[Int] = None
+    startSide: Optional[DiffSide] = 'RIGHT'
+    subjectType: Optional[PullRequestReviewThreadSubjectType] = 'LINE'
+    typename__: Optional[Literal['AddPullRequestReviewThreadInput']] = Field(
+        'AddPullRequestReviewThreadInput', alias='__typename'
+    )
+
+
+class AddPullRequestReviewThreadReplyInput(BaseModel):
+    """
+    Autogenerated input type of AddPullRequestReviewThreadReply
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    pullRequestReviewId: Optional[ID] = None
+    pullRequestReviewThreadId: ID
+    typename__: Optional[Literal['AddPullRequestReviewThreadReplyInput']] = Field(
+        'AddPullRequestReviewThreadReplyInput', alias='__typename'
+    )
+
+
+class AddReactionInput(BaseModel):
+    """
+    Autogenerated input type of AddReaction
+    """
+
+    clientMutationId: Optional[String] = None
+    content: ReactionContent
+    subjectId: ID
+    typename__: Optional[Literal['AddReactionInput']] = Field(
+        'AddReactionInput', alias='__typename'
+    )
+
+
+class AddStarInput(BaseModel):
+    """
+    Autogenerated input type of AddStar
+    """
+
+    clientMutationId: Optional[String] = None
+    starrableId: ID
+    typename__: Optional[Literal['AddStarInput']] = Field(
+        'AddStarInput', alias='__typename'
+    )
+
+
+class AddUpvoteInput(BaseModel):
+    """
+    Autogenerated input type of AddUpvote
+    """
+
+    clientMutationId: Optional[String] = None
+    subjectId: ID
+    typename__: Optional[Literal['AddUpvoteInput']] = Field(
+        'AddUpvoteInput', alias='__typename'
+    )
+
+
+class AddVerifiableDomainInput(BaseModel):
+    """
+    Autogenerated input type of AddVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    domain: URI
+    ownerId: ID
+    typename__: Optional[Literal['AddVerifiableDomainInput']] = Field(
+        'AddVerifiableDomainInput', alias='__typename'
+    )
+
+
+class ApproveDeploymentsInput(BaseModel):
+    """
+    Autogenerated input type of ApproveDeployments
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[String] = ''
+    environmentIds: List[ID]
+    workflowRunId: ID
+    typename__: Optional[Literal['ApproveDeploymentsInput']] = Field(
+        'ApproveDeploymentsInput', alias='__typename'
+    )
+
+
+class ApproveVerifiableDomainInput(BaseModel):
+    """
+    Autogenerated input type of ApproveVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['ApproveVerifiableDomainInput']] = Field(
+        'ApproveVerifiableDomainInput', alias='__typename'
+    )
+
+
+class ArchiveProjectV2ItemInput(BaseModel):
+    """
+    Autogenerated input type of ArchiveProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    itemId: ID
+    projectId: ID
+    typename__: Optional[Literal['ArchiveProjectV2ItemInput']] = Field(
+        'ArchiveProjectV2ItemInput', alias='__typename'
+    )
+
+
+class ArchiveRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of ArchiveRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    typename__: Optional[Literal['ArchiveRepositoryInput']] = Field(
+        'ArchiveRepositoryInput', alias='__typename'
+    )
+
+
+class AuditLogOrder(BaseModel):
+    """
+    Ordering options for Audit Log connections.
+    """
+
+    direction: Optional[OrderDirection] = None
+    field: Optional[AuditLogOrderField] = None
+    typename__: Optional[Literal['AuditLogOrder']] = Field(
+        'AuditLogOrder', alias='__typename'
+    )
+
+
+class BranchNamePatternParametersInput(BaseModel):
+    """
+    Parameters to be used for the branch_name_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Optional[Boolean] = None
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['BranchNamePatternParametersInput']] = Field(
+        'BranchNamePatternParametersInput', alias='__typename'
+    )
+
+
+class BulkSponsorship(BaseModel):
+    """
+    Information about a sponsorship to make for a user or organization with a GitHub
+    Sponsors profile, as part of sponsoring many users or organizations at once.
+    """
+
+    amount: Int
+    sponsorableId: Optional[ID] = None
+    sponsorableLogin: Optional[String] = None
+    typename__: Optional[Literal['BulkSponsorship']] = Field(
+        'BulkSponsorship', alias='__typename'
+    )
+
+
+class CancelEnterpriseAdminInvitationInput(BaseModel):
+    """
+    Autogenerated input type of CancelEnterpriseAdminInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    invitationId: ID
+    typename__: Optional[Literal['CancelEnterpriseAdminInvitationInput']] = Field(
+        'CancelEnterpriseAdminInvitationInput', alias='__typename'
+    )
+
+
+class CancelSponsorshipInput(BaseModel):
+    """
+    Autogenerated input type of CancelSponsorship
+    """
+
+    clientMutationId: Optional[String] = None
+    sponsorId: Optional[ID] = None
+    sponsorLogin: Optional[String] = None
+    sponsorableId: Optional[ID] = None
+    sponsorableLogin: Optional[String] = None
+    typename__: Optional[Literal['CancelSponsorshipInput']] = Field(
+        'CancelSponsorshipInput', alias='__typename'
+    )
+
+
+class ChangeUserStatusInput(BaseModel):
+    """
+    Autogenerated input type of ChangeUserStatus
+    """
+
+    clientMutationId: Optional[String] = None
+    emoji: Optional[String] = None
+    expiresAt: Optional[DateTime] = None
+    limitedAvailability: Optional[Boolean] = False
+    message: Optional[String] = None
+    organizationId: Optional[ID] = None
+    typename__: Optional[Literal['ChangeUserStatusInput']] = Field(
+        'ChangeUserStatusInput', alias='__typename'
+    )
+
+
+class CheckAnnotationData(BaseModel):
+    """
+    Information from a check run analysis to specific lines of code.
+    """
+
+    annotationLevel: CheckAnnotationLevel
+    location: CheckAnnotationRange
+    message: String
+    path: String
+    rawDetails: Optional[String] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['CheckAnnotationData']] = Field(
+        'CheckAnnotationData', alias='__typename'
+    )
+
+
+class CheckAnnotationRange(BaseModel):
+    """
+    Information from a check run analysis to specific lines of code.
+    """
+
+    endColumn: Optional[Int] = None
+    endLine: Int
+    startColumn: Optional[Int] = None
+    startLine: Int
+    typename__: Optional[Literal['CheckAnnotationRange']] = Field(
+        'CheckAnnotationRange', alias='__typename'
+    )
+
+
+class CheckRunAction(BaseModel):
+    """
+    Possible further actions the integrator can perform.
+    """
+
+    description: String
+    identifier: String
+    label: String
+    typename__: Optional[Literal['CheckRunAction']] = Field(
+        'CheckRunAction', alias='__typename'
+    )
+
+
+class CheckRunFilter(BaseModel):
+    """
+    The filters that are available when fetching check runs.
+    """
+
+    appId: Optional[Int] = None
+    checkName: Optional[String] = None
+    checkType: Optional[CheckRunType] = None
+    conclusions: Optional[List[CheckConclusionState]] = None
+    status: Optional[CheckStatusState] = None
+    statuses: Optional[List[CheckStatusState]] = None
+    typename__: Optional[Literal['CheckRunFilter']] = Field(
+        'CheckRunFilter', alias='__typename'
+    )
+
+
+class CheckRunOutput(BaseModel):
+    """
+    Descriptive details about the check run.
+    """
+
+    annotations: Optional[List[CheckAnnotationData]] = None
+    images: Optional[List[CheckRunOutputImage]] = None
+    summary: String
+    text: Optional[String] = None
+    title: String
+    typename__: Optional[Literal['CheckRunOutput']] = Field(
+        'CheckRunOutput', alias='__typename'
+    )
+
+
+class CheckRunOutputImage(BaseModel):
+    """
+    Images attached to the check run output displayed in the GitHub pull request UI.
+    """
+
+    alt: String
+    caption: Optional[String] = None
+    imageUrl: URI
+    typename__: Optional[Literal['CheckRunOutputImage']] = Field(
+        'CheckRunOutputImage', alias='__typename'
+    )
+
+
+class CheckSuiteAutoTriggerPreference(BaseModel):
+    """
+    The auto-trigger preferences that are available for check suites.
+    """
+
+    appId: ID
+    setting: Boolean
+    typename__: Optional[Literal['CheckSuiteAutoTriggerPreference']] = Field(
+        'CheckSuiteAutoTriggerPreference', alias='__typename'
+    )
+
+
+class CheckSuiteFilter(BaseModel):
+    """
+    The filters that are available when fetching check suites.
+    """
+
+    appId: Optional[Int] = None
+    checkName: Optional[String] = None
+    typename__: Optional[Literal['CheckSuiteFilter']] = Field(
+        'CheckSuiteFilter', alias='__typename'
+    )
+
+
+class ClearLabelsFromLabelableInput(BaseModel):
+    """
+    Autogenerated input type of ClearLabelsFromLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelableId: ID
+    typename__: Optional[Literal['ClearLabelsFromLabelableInput']] = Field(
+        'ClearLabelsFromLabelableInput', alias='__typename'
+    )
+
+
+class ClearProjectV2ItemFieldValueInput(BaseModel):
+    """
+    Autogenerated input type of ClearProjectV2ItemFieldValue
+    """
+
+    clientMutationId: Optional[String] = None
+    fieldId: ID
+    itemId: ID
+    projectId: ID
+    typename__: Optional[Literal['ClearProjectV2ItemFieldValueInput']] = Field(
+        'ClearProjectV2ItemFieldValueInput', alias='__typename'
+    )
+
+
+class CloneProjectInput(BaseModel):
+    """
+    Autogenerated input type of CloneProject
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    includeWorkflows: Boolean
+    name: String
+    public: Optional[Boolean] = None
+    sourceId: ID
+    targetOwnerId: ID
+    typename__: Optional[Literal['CloneProjectInput']] = Field(
+        'CloneProjectInput', alias='__typename'
+    )
+
+
+class CloneTemplateRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of CloneTemplateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    description: Optional[String] = None
+    includeAllBranches: Optional[Boolean] = False
+    name: String
+    ownerId: ID
+    repositoryId: ID
+    visibility: RepositoryVisibility
+    typename__: Optional[Literal['CloneTemplateRepositoryInput']] = Field(
+        'CloneTemplateRepositoryInput', alias='__typename'
+    )
+
+
+class CloseDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of CloseDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussionId: ID
+    reason: Optional[DiscussionCloseReason] = 'RESOLVED'
+    typename__: Optional[Literal['CloseDiscussionInput']] = Field(
+        'CloseDiscussionInput', alias='__typename'
+    )
+
+
+class CloseIssueInput(BaseModel):
+    """
+    Autogenerated input type of CloseIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    stateReason: Optional[IssueClosedStateReason] = None
+    typename__: Optional[Literal['CloseIssueInput']] = Field(
+        'CloseIssueInput', alias='__typename'
+    )
+
+
+class ClosePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of ClosePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['ClosePullRequestInput']] = Field(
+        'ClosePullRequestInput', alias='__typename'
+    )
+
+
+class CommitAuthor(BaseModel):
+    """
+    Specifies an author for filtering Git commits.
+    """
+
+    emails: Optional[List[String]] = None
+    id: Optional[ID] = None
+    typename__: Optional[Literal['CommitAuthor']] = Field(
+        'CommitAuthor', alias='__typename'
+    )
+
+
+class CommitAuthorEmailPatternParametersInput(BaseModel):
+    """
+    Parameters to be used for the commit_author_email_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Optional[Boolean] = None
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitAuthorEmailPatternParametersInput']] = Field(
+        'CommitAuthorEmailPatternParametersInput', alias='__typename'
+    )
+
+
+class CommitContributionOrder(BaseModel):
+    """
+    Ordering options for commit contribution connections.
+    """
+
+    direction: OrderDirection
+    field: CommitContributionOrderField
+    typename__: Optional[Literal['CommitContributionOrder']] = Field(
+        'CommitContributionOrder', alias='__typename'
+    )
+
+
+class CommitMessage(BaseModel):
+    """
+    A message to include with a new commit
+    """
+
+    body: Optional[String] = None
+    headline: String
+    typename__: Optional[Literal['CommitMessage']] = Field(
+        'CommitMessage', alias='__typename'
+    )
+
+
+class CommitMessagePatternParametersInput(BaseModel):
+    """
+    Parameters to be used for the commit_message_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Optional[Boolean] = None
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitMessagePatternParametersInput']] = Field(
+        'CommitMessagePatternParametersInput', alias='__typename'
+    )
+
+
+class CommittableBranch(BaseModel):
+    """
+    A git ref for a commit to be appended to.
+
+    The ref must be a branch, i.e. its fully qualified name must start
+    with `refs/heads/` (although the input is not required to be fully
+    qualified).
+
+    The Ref may be specified by its global node ID or by the
+    `repositoryNameWithOwner` and `branchName`.
+
+    ### Examples
+
+    Specify a branch using a global node ID:
+
+        { "id": "MDM6UmVmMTpyZWZzL2hlYWRzL21haW4=" }
+
+    Specify a branch using `repositoryNameWithOwner` and `branchName`:
+
+        {
+          "repositoryNameWithOwner": "github/graphql-client",
+          "branchName": "main"
+        }
+    """
+
+    branchName: Optional[String] = None
+    id: Optional[ID] = None
+    repositoryNameWithOwner: Optional[String] = None
+    typename__: Optional[Literal['CommittableBranch']] = Field(
+        'CommittableBranch', alias='__typename'
+    )
+
+
+class CommitterEmailPatternParametersInput(BaseModel):
+    """
+    Parameters to be used for the committer_email_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Optional[Boolean] = None
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['CommitterEmailPatternParametersInput']] = Field(
+        'CommitterEmailPatternParametersInput', alias='__typename'
+    )
+
+
+class ContributionOrder(BaseModel):
+    """
+    Ordering options for contribution connections.
+    """
+
+    direction: OrderDirection
+    typename__: Optional[Literal['ContributionOrder']] = Field(
+        'ContributionOrder', alias='__typename'
+    )
+
+
+class ConvertProjectCardNoteToIssueInput(BaseModel):
+    """
+    Autogenerated input type of ConvertProjectCardNoteToIssue
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    projectCardId: ID
+    repositoryId: ID
+    title: Optional[String] = None
+    typename__: Optional[Literal['ConvertProjectCardNoteToIssueInput']] = Field(
+        'ConvertProjectCardNoteToIssueInput', alias='__typename'
+    )
+
+
+class ConvertPullRequestToDraftInput(BaseModel):
+    """
+    Autogenerated input type of ConvertPullRequestToDraft
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['ConvertPullRequestToDraftInput']] = Field(
+        'ConvertPullRequestToDraftInput', alias='__typename'
+    )
+
+
+class CopyProjectV2Input(BaseModel):
+    """
+    Autogenerated input type of CopyProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    includeDraftIssues: Optional[Boolean] = False
+    ownerId: ID
+    projectId: ID
+    title: String
+    typename__: Optional[Literal['CopyProjectV2Input']] = Field(
+        'CopyProjectV2Input', alias='__typename'
+    )
+
+
+class CreateAttributionInvitationInput(BaseModel):
+    """
+    Autogenerated input type of CreateAttributionInvitation
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    sourceId: ID
+    targetId: ID
+    typename__: Optional[Literal['CreateAttributionInvitationInput']] = Field(
+        'CreateAttributionInvitationInput', alias='__typename'
+    )
+
+
+class CreateBranchProtectionRuleInput(BaseModel):
+    """
+    Autogenerated input type of CreateBranchProtectionRule
+    """
+
+    allowsDeletions: Optional[Boolean] = None
+    allowsForcePushes: Optional[Boolean] = None
+    blocksCreations: Optional[Boolean] = None
+    bypassForcePushActorIds: Optional[List[ID]] = None
+    bypassPullRequestActorIds: Optional[List[ID]] = None
+    clientMutationId: Optional[String] = None
+    dismissesStaleReviews: Optional[Boolean] = None
+    isAdminEnforced: Optional[Boolean] = None
+    lockAllowsFetchAndMerge: Optional[Boolean] = None
+    lockBranch: Optional[Boolean] = None
+    pattern: String
+    pushActorIds: Optional[List[ID]] = None
+    repositoryId: ID
+    requireLastPushApproval: Optional[Boolean] = None
+    requiredApprovingReviewCount: Optional[Int] = None
+    requiredDeploymentEnvironments: Optional[List[String]] = None
+    requiredStatusCheckContexts: Optional[List[String]] = None
+    requiredStatusChecks: Optional[List[RequiredStatusCheckInput]] = None
+    requiresApprovingReviews: Optional[Boolean] = None
+    requiresCodeOwnerReviews: Optional[Boolean] = None
+    requiresCommitSignatures: Optional[Boolean] = None
+    requiresConversationResolution: Optional[Boolean] = None
+    requiresDeployments: Optional[Boolean] = None
+    requiresLinearHistory: Optional[Boolean] = None
+    requiresStatusChecks: Optional[Boolean] = None
+    requiresStrictStatusChecks: Optional[Boolean] = None
+    restrictsPushes: Optional[Boolean] = None
+    restrictsReviewDismissals: Optional[Boolean] = None
+    reviewDismissalActorIds: Optional[List[ID]] = None
+    typename__: Optional[Literal['CreateBranchProtectionRuleInput']] = Field(
+        'CreateBranchProtectionRuleInput', alias='__typename'
+    )
+
+
+class CreateCheckRunInput(BaseModel):
+    """
+    Autogenerated input type of CreateCheckRun
+    """
+
+    actions: Optional[List[CheckRunAction]] = None
+    clientMutationId: Optional[String] = None
+    completedAt: Optional[DateTime] = None
+    conclusion: Optional[CheckConclusionState] = None
+    detailsUrl: Optional[URI] = None
+    externalId: Optional[String] = None
+    headSha: GitObjectID
+    name: String
+    output: Optional[CheckRunOutput] = None
+    repositoryId: ID
+    startedAt: Optional[DateTime] = None
+    status: Optional[RequestableCheckStatusState] = None
+    typename__: Optional[Literal['CreateCheckRunInput']] = Field(
+        'CreateCheckRunInput', alias='__typename'
+    )
+
+
+class CreateCheckSuiteInput(BaseModel):
+    """
+    Autogenerated input type of CreateCheckSuite
+    """
+
+    clientMutationId: Optional[String] = None
+    headSha: GitObjectID
+    repositoryId: ID
+    typename__: Optional[Literal['CreateCheckSuiteInput']] = Field(
+        'CreateCheckSuiteInput', alias='__typename'
+    )
+
+
+class CreateCommitOnBranchInput(BaseModel):
+    """
+    Autogenerated input type of CreateCommitOnBranch
+    """
+
+    branch: CommittableBranch
+    clientMutationId: Optional[String] = None
+    expectedHeadOid: GitObjectID
+    fileChanges: Optional[FileChanges] = None
+    message: CommitMessage
+    typename__: Optional[Literal['CreateCommitOnBranchInput']] = Field(
+        'CreateCommitOnBranchInput', alias='__typename'
+    )
+
+
+class CreateDeploymentInput(BaseModel):
+    """
+    Autogenerated input type of CreateDeployment
+    """
+
+    autoMerge: Optional[Boolean] = True
+    clientMutationId: Optional[String] = None
+    description: Optional[String] = ''
+    environment: Optional[String] = 'production'
+    payload: Optional[String] = '{}'
+    refId: ID
+    repositoryId: ID
+    requiredContexts: Optional[List[String]] = None
+    task: Optional[String] = 'deploy'
+    typename__: Optional[Literal['CreateDeploymentInput']] = Field(
+        'CreateDeploymentInput', alias='__typename'
+    )
+
+
+class CreateDeploymentStatusInput(BaseModel):
+    """
+    Autogenerated input type of CreateDeploymentStatus
+    """
+
+    autoInactive: Optional[Boolean] = True
+    clientMutationId: Optional[String] = None
+    deploymentId: ID
+    description: Optional[String] = ''
+    environment: Optional[String] = None
+    environmentUrl: Optional[String] = ''
+    logUrl: Optional[String] = ''
+    state: DeploymentStatusState
+    typename__: Optional[Literal['CreateDeploymentStatusInput']] = Field(
+        'CreateDeploymentStatusInput', alias='__typename'
+    )
+
+
+class CreateDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of CreateDiscussion
+    """
+
+    body: String
+    categoryId: ID
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    title: String
+    typename__: Optional[Literal['CreateDiscussionInput']] = Field(
+        'CreateDiscussionInput', alias='__typename'
+    )
+
+
+class CreateEnterpriseOrganizationInput(BaseModel):
+    """
+    Autogenerated input type of CreateEnterpriseOrganization
+    """
+
+    adminLogins: List[String]
+    billingEmail: String
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    profileName: String
+    typename__: Optional[Literal['CreateEnterpriseOrganizationInput']] = Field(
+        'CreateEnterpriseOrganizationInput', alias='__typename'
+    )
+
+
+class CreateEnvironmentInput(BaseModel):
+    """
+    Autogenerated input type of CreateEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    repositoryId: ID
+    typename__: Optional[Literal['CreateEnvironmentInput']] = Field(
+        'CreateEnvironmentInput', alias='__typename'
+    )
+
+
+class CreateIpAllowListEntryInput(BaseModel):
+    """
+    Autogenerated input type of CreateIpAllowListEntry
+    """
+
+    allowListValue: String
+    clientMutationId: Optional[String] = None
+    isActive: Boolean
+    name: Optional[String] = None
+    ownerId: ID
+    typename__: Optional[Literal['CreateIpAllowListEntryInput']] = Field(
+        'CreateIpAllowListEntryInput', alias='__typename'
+    )
+
+
+class CreateIssueInput(BaseModel):
+    """
+    Autogenerated input type of CreateIssue
+    """
+
+    assigneeIds: Optional[List[ID]] = None
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    issueTemplate: Optional[String] = None
+    labelIds: Optional[List[ID]] = None
+    milestoneId: Optional[ID] = None
+    projectIds: Optional[List[ID]] = None
+    repositoryId: ID
+    title: String
+    typename__: Optional[Literal['CreateIssueInput']] = Field(
+        'CreateIssueInput', alias='__typename'
+    )
+
+
+class CreateLabelInput(BaseModel):
+    """
+    Autogenerated input type of CreateLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    color: String
+    description: Optional[String] = None
+    name: String
+    repositoryId: ID
+    typename__: Optional[Literal['CreateLabelInput']] = Field(
+        'CreateLabelInput', alias='__typename'
+    )
+
+
+class CreateLinkedBranchInput(BaseModel):
+    """
+    Autogenerated input type of CreateLinkedBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    name: Optional[String] = None
+    oid: GitObjectID
+    repositoryId: Optional[ID] = None
+    typename__: Optional[Literal['CreateLinkedBranchInput']] = Field(
+        'CreateLinkedBranchInput', alias='__typename'
+    )
+
+
+class CreateMigrationSourceInput(BaseModel):
+    """
+    Autogenerated input type of CreateMigrationSource
+    """
+
+    accessToken: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    githubPat: Optional[String] = None
+    name: String
+    ownerId: ID
+    type: MigrationSourceType
+    url: Optional[String] = None
+    typename__: Optional[Literal['CreateMigrationSourceInput']] = Field(
+        'CreateMigrationSourceInput', alias='__typename'
+    )
+
+
+class CreateProjectInput(BaseModel):
+    """
+    Autogenerated input type of CreateProject
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    name: String
+    ownerId: ID
+    repositoryIds: Optional[List[ID]] = None
+    template: Optional[ProjectTemplate] = None
+    typename__: Optional[Literal['CreateProjectInput']] = Field(
+        'CreateProjectInput', alias='__typename'
+    )
+
+
+class CreateProjectV2FieldInput(BaseModel):
+    """
+    Autogenerated input type of CreateProjectV2Field
+    """
+
+    clientMutationId: Optional[String] = None
+    dataType: ProjectV2CustomFieldType
+    name: String
+    projectId: ID
+    singleSelectOptions: Optional[List[ProjectV2SingleSelectFieldOptionInput]] = None
+    typename__: Optional[Literal['CreateProjectV2FieldInput']] = Field(
+        'CreateProjectV2FieldInput', alias='__typename'
+    )
+
+
+class CreateProjectV2Input(BaseModel):
+    """
+    Autogenerated input type of CreateProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    repositoryId: Optional[ID] = None
+    teamId: Optional[ID] = None
+    title: String
+    typename__: Optional[Literal['CreateProjectV2Input']] = Field(
+        'CreateProjectV2Input', alias='__typename'
+    )
+
+
+class CreatePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of CreatePullRequest
+    """
+
+    baseRefName: String
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    draft: Optional[Boolean] = False
+    headRefName: String
+    headRepositoryId: Optional[ID] = None
+    maintainerCanModify: Optional[Boolean] = True
+    repositoryId: ID
+    title: String
+    typename__: Optional[Literal['CreatePullRequestInput']] = Field(
+        'CreatePullRequestInput', alias='__typename'
+    )
+
+
+class CreateRefInput(BaseModel):
+    """
+    Autogenerated input type of CreateRef
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    oid: GitObjectID
+    repositoryId: ID
+    typename__: Optional[Literal['CreateRefInput']] = Field(
+        'CreateRefInput', alias='__typename'
+    )
+
+
+class CreateRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of CreateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    description: Optional[String] = None
+    hasIssuesEnabled: Optional[Boolean] = True
+    hasWikiEnabled: Optional[Boolean] = False
+    homepageUrl: Optional[URI] = None
+    name: String
+    ownerId: Optional[ID] = None
+    teamId: Optional[ID] = None
+    template: Optional[Boolean] = False
+    visibility: RepositoryVisibility
+    typename__: Optional[Literal['CreateRepositoryInput']] = Field(
+        'CreateRepositoryInput', alias='__typename'
+    )
+
+
+class CreateRepositoryRulesetInput(BaseModel):
+    """
+    Autogenerated input type of CreateRepositoryRuleset
+    """
+
+    bypassActors: Optional[List[RepositoryRulesetBypassActorInput]] = None
+    clientMutationId: Optional[String] = None
+    conditions: RepositoryRuleConditionsInput
+    enforcement: RuleEnforcement
+    name: String
+    rules: Optional[List[RepositoryRuleInput]] = None
+    sourceId: ID
+    target: Optional[RepositoryRulesetTarget] = None
+    typename__: Optional[Literal['CreateRepositoryRulesetInput']] = Field(
+        'CreateRepositoryRulesetInput', alias='__typename'
+    )
+
+
+class CreateSponsorsListingInput(BaseModel):
+    """
+    Autogenerated input type of CreateSponsorsListing
+    """
+
+    billingCountryOrRegionCode: Optional[SponsorsCountryOrRegionCode] = None
+    clientMutationId: Optional[String] = None
+    contactEmail: Optional[String] = None
+    fiscalHostLogin: Optional[String] = None
+    fiscallyHostedProjectProfileUrl: Optional[String] = None
+    fullDescription: Optional[String] = None
+    residenceCountryOrRegionCode: Optional[SponsorsCountryOrRegionCode] = None
+    sponsorableLogin: Optional[String] = None
+    typename__: Optional[Literal['CreateSponsorsListingInput']] = Field(
+        'CreateSponsorsListingInput', alias='__typename'
+    )
+
+
+class CreateSponsorsTierInput(BaseModel):
+    """
+    Autogenerated input type of CreateSponsorsTier
+    """
+
+    amount: Int
+    clientMutationId: Optional[String] = None
+    description: String
+    isRecurring: Optional[Boolean] = True
+    publish: Optional[Boolean] = False
+    repositoryId: Optional[ID] = None
+    repositoryName: Optional[String] = None
+    repositoryOwnerLogin: Optional[String] = None
+    sponsorableId: Optional[ID] = None
+    sponsorableLogin: Optional[String] = None
+    welcomeMessage: Optional[String] = None
+    typename__: Optional[Literal['CreateSponsorsTierInput']] = Field(
+        'CreateSponsorsTierInput', alias='__typename'
+    )
+
+
+class CreateSponsorshipInput(BaseModel):
+    """
+    Autogenerated input type of CreateSponsorship
+    """
+
+    amount: Optional[Int] = None
+    clientMutationId: Optional[String] = None
+    isRecurring: Optional[Boolean] = None
+    privacyLevel: Optional[SponsorshipPrivacy] = 'PUBLIC'
+    receiveEmails: Optional[Boolean] = True
+    sponsorId: Optional[ID] = None
+    sponsorLogin: Optional[String] = None
+    sponsorableId: Optional[ID] = None
+    sponsorableLogin: Optional[String] = None
+    tierId: Optional[ID] = None
+    typename__: Optional[Literal['CreateSponsorshipInput']] = Field(
+        'CreateSponsorshipInput', alias='__typename'
+    )
+
+
+class CreateSponsorshipsInput(BaseModel):
+    """
+    Autogenerated input type of CreateSponsorships
+    """
+
+    clientMutationId: Optional[String] = None
+    privacyLevel: Optional[SponsorshipPrivacy] = 'PUBLIC'
+    receiveEmails: Optional[Boolean] = False
+    sponsorLogin: String
+    sponsorships: List[BulkSponsorship]
+    typename__: Optional[Literal['CreateSponsorshipsInput']] = Field(
+        'CreateSponsorshipsInput', alias='__typename'
+    )
+
+
+class CreateTeamDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of CreateTeamDiscussionComment
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    discussionId: Optional[ID] = None
+    typename__: Optional[Literal['CreateTeamDiscussionCommentInput']] = Field(
+        'CreateTeamDiscussionCommentInput', alias='__typename'
+    )
+
+
+class CreateTeamDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of CreateTeamDiscussion
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    private: Optional[Boolean] = None
+    teamId: Optional[ID] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['CreateTeamDiscussionInput']] = Field(
+        'CreateTeamDiscussionInput', alias='__typename'
+    )
+
+
+class DeclineTopicSuggestionInput(BaseModel):
+    """
+    Autogenerated input type of DeclineTopicSuggestion
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    reason: TopicSuggestionDeclineReason
+    repositoryId: ID
+    typename__: Optional[Literal['DeclineTopicSuggestionInput']] = Field(
+        'DeclineTopicSuggestionInput', alias='__typename'
+    )
+
+
+class DeleteBranchProtectionRuleInput(BaseModel):
+    """
+    Autogenerated input type of DeleteBranchProtectionRule
+    """
+
+    branchProtectionRuleId: ID
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteBranchProtectionRuleInput']] = Field(
+        'DeleteBranchProtectionRuleInput', alias='__typename'
+    )
+
+
+class DeleteDeploymentInput(BaseModel):
+    """
+    Autogenerated input type of DeleteDeployment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteDeploymentInput']] = Field(
+        'DeleteDeploymentInput', alias='__typename'
+    )
+
+
+class DeleteDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of DeleteDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteDiscussionCommentInput']] = Field(
+        'DeleteDiscussionCommentInput', alias='__typename'
+    )
+
+
+class DeleteDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of DeleteDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteDiscussionInput']] = Field(
+        'DeleteDiscussionInput', alias='__typename'
+    )
+
+
+class DeleteEnvironmentInput(BaseModel):
+    """
+    Autogenerated input type of DeleteEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteEnvironmentInput']] = Field(
+        'DeleteEnvironmentInput', alias='__typename'
+    )
+
+
+class DeleteIpAllowListEntryInput(BaseModel):
+    """
+    Autogenerated input type of DeleteIpAllowListEntry
+    """
+
+    clientMutationId: Optional[String] = None
+    ipAllowListEntryId: ID
+    typename__: Optional[Literal['DeleteIpAllowListEntryInput']] = Field(
+        'DeleteIpAllowListEntryInput', alias='__typename'
+    )
+
+
+class DeleteIssueCommentInput(BaseModel):
+    """
+    Autogenerated input type of DeleteIssueComment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteIssueCommentInput']] = Field(
+        'DeleteIssueCommentInput', alias='__typename'
+    )
+
+
+class DeleteIssueInput(BaseModel):
+    """
+    Autogenerated input type of DeleteIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    typename__: Optional[Literal['DeleteIssueInput']] = Field(
+        'DeleteIssueInput', alias='__typename'
+    )
+
+
+class DeleteLabelInput(BaseModel):
+    """
+    Autogenerated input type of DeleteLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteLabelInput']] = Field(
+        'DeleteLabelInput', alias='__typename'
+    )
+
+
+class DeleteLinkedBranchInput(BaseModel):
+    """
+    Autogenerated input type of DeleteLinkedBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    linkedBranchId: ID
+    typename__: Optional[Literal['DeleteLinkedBranchInput']] = Field(
+        'DeleteLinkedBranchInput', alias='__typename'
+    )
+
+
+class DeletePackageVersionInput(BaseModel):
+    """
+    Autogenerated input type of DeletePackageVersion
+    """
+
+    clientMutationId: Optional[String] = None
+    packageVersionId: ID
+    typename__: Optional[Literal['DeletePackageVersionInput']] = Field(
+        'DeletePackageVersionInput', alias='__typename'
+    )
+
+
+class DeleteProjectCardInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectCard
+    """
+
+    cardId: ID
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['DeleteProjectCardInput']] = Field(
+        'DeleteProjectCardInput', alias='__typename'
+    )
+
+
+class DeleteProjectColumnInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    columnId: ID
+    typename__: Optional[Literal['DeleteProjectColumnInput']] = Field(
+        'DeleteProjectColumnInput', alias='__typename'
+    )
+
+
+class DeleteProjectInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProject
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    typename__: Optional[Literal['DeleteProjectInput']] = Field(
+        'DeleteProjectInput', alias='__typename'
+    )
+
+
+class DeleteProjectV2FieldInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectV2Field
+    """
+
+    clientMutationId: Optional[String] = None
+    fieldId: ID
+    typename__: Optional[Literal['DeleteProjectV2FieldInput']] = Field(
+        'DeleteProjectV2FieldInput', alias='__typename'
+    )
+
+
+class DeleteProjectV2Input(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    typename__: Optional[Literal['DeleteProjectV2Input']] = Field(
+        'DeleteProjectV2Input', alias='__typename'
+    )
+
+
+class DeleteProjectV2ItemInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    itemId: ID
+    projectId: ID
+    typename__: Optional[Literal['DeleteProjectV2ItemInput']] = Field(
+        'DeleteProjectV2ItemInput', alias='__typename'
+    )
+
+
+class DeleteProjectV2WorkflowInput(BaseModel):
+    """
+    Autogenerated input type of DeleteProjectV2Workflow
+    """
+
+    clientMutationId: Optional[String] = None
+    workflowId: ID
+    typename__: Optional[Literal['DeleteProjectV2WorkflowInput']] = Field(
+        'DeleteProjectV2WorkflowInput', alias='__typename'
+    )
+
+
+class DeletePullRequestReviewCommentInput(BaseModel):
+    """
+    Autogenerated input type of DeletePullRequestReviewComment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeletePullRequestReviewCommentInput']] = Field(
+        'DeletePullRequestReviewCommentInput', alias='__typename'
+    )
+
+
+class DeletePullRequestReviewInput(BaseModel):
+    """
+    Autogenerated input type of DeletePullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestReviewId: ID
+    typename__: Optional[Literal['DeletePullRequestReviewInput']] = Field(
+        'DeletePullRequestReviewInput', alias='__typename'
+    )
+
+
+class DeleteRefInput(BaseModel):
+    """
+    Autogenerated input type of DeleteRef
+    """
+
+    clientMutationId: Optional[String] = None
+    refId: ID
+    typename__: Optional[Literal['DeleteRefInput']] = Field(
+        'DeleteRefInput', alias='__typename'
+    )
+
+
+class DeleteRepositoryRulesetInput(BaseModel):
+    """
+    Autogenerated input type of DeleteRepositoryRuleset
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryRulesetId: ID
+    typename__: Optional[Literal['DeleteRepositoryRulesetInput']] = Field(
+        'DeleteRepositoryRulesetInput', alias='__typename'
+    )
+
+
+class DeleteTeamDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of DeleteTeamDiscussionComment
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteTeamDiscussionCommentInput']] = Field(
+        'DeleteTeamDiscussionCommentInput', alias='__typename'
+    )
+
+
+class DeleteTeamDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of DeleteTeamDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteTeamDiscussionInput']] = Field(
+        'DeleteTeamDiscussionInput', alias='__typename'
+    )
+
+
+class DeleteVerifiableDomainInput(BaseModel):
+    """
+    Autogenerated input type of DeleteVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DeleteVerifiableDomainInput']] = Field(
+        'DeleteVerifiableDomainInput', alias='__typename'
+    )
+
+
+class DeploymentOrder(BaseModel):
+    """
+    Ordering options for deployment connections
+    """
+
+    direction: OrderDirection
+    field: DeploymentOrderField
+    typename__: Optional[Literal['DeploymentOrder']] = Field(
+        'DeploymentOrder', alias='__typename'
+    )
+
+
+class DequeuePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of DequeuePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['DequeuePullRequestInput']] = Field(
+        'DequeuePullRequestInput', alias='__typename'
+    )
+
+
+class DisablePullRequestAutoMergeInput(BaseModel):
+    """
+    Autogenerated input type of DisablePullRequestAutoMerge
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['DisablePullRequestAutoMergeInput']] = Field(
+        'DisablePullRequestAutoMergeInput', alias='__typename'
+    )
+
+
+class DiscussionOrder(BaseModel):
+    """
+    Ways in which lists of discussions can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: DiscussionOrderField
+    typename__: Optional[Literal['DiscussionOrder']] = Field(
+        'DiscussionOrder', alias='__typename'
+    )
+
+
+class DiscussionPollOptionOrder(BaseModel):
+    """
+    Ordering options for discussion poll option connections.
+    """
+
+    direction: OrderDirection
+    field: DiscussionPollOptionOrderField
+    typename__: Optional[Literal['DiscussionPollOptionOrder']] = Field(
+        'DiscussionPollOptionOrder', alias='__typename'
+    )
+
+
+class DismissPullRequestReviewInput(BaseModel):
+    """
+    Autogenerated input type of DismissPullRequestReview
+    """
+
+    clientMutationId: Optional[String] = None
+    message: String
+    pullRequestReviewId: ID
+    typename__: Optional[Literal['DismissPullRequestReviewInput']] = Field(
+        'DismissPullRequestReviewInput', alias='__typename'
+    )
+
+
+class DismissRepositoryVulnerabilityAlertInput(BaseModel):
+    """
+    Autogenerated input type of DismissRepositoryVulnerabilityAlert
+    """
+
+    clientMutationId: Optional[String] = None
+    dismissReason: DismissReason
+    repositoryVulnerabilityAlertId: ID
+    typename__: Optional[Literal['DismissRepositoryVulnerabilityAlertInput']] = Field(
+        'DismissRepositoryVulnerabilityAlertInput', alias='__typename'
+    )
+
+
+class DraftPullRequestReviewComment(BaseModel):
+    """
+    Specifies a review comment to be left with a Pull Request Review.
+    """
+
+    body: String
+    path: String
+    position: Int
+    typename__: Optional[Literal['DraftPullRequestReviewComment']] = Field(
+        'DraftPullRequestReviewComment', alias='__typename'
+    )
+
+
+class DraftPullRequestReviewThread(BaseModel):
+    """
+    Specifies a review comment thread to be left with a Pull Request Review.
+    """
+
+    body: String
+    line: Int
+    path: String
+    side: Optional[DiffSide] = 'RIGHT'
+    startLine: Optional[Int] = None
+    startSide: Optional[DiffSide] = 'RIGHT'
+    typename__: Optional[Literal['DraftPullRequestReviewThread']] = Field(
+        'DraftPullRequestReviewThread', alias='__typename'
+    )
+
+
+class EnablePullRequestAutoMergeInput(BaseModel):
+    """
+    Autogenerated input type of EnablePullRequestAutoMerge
+    """
+
+    authorEmail: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    commitBody: Optional[String] = None
+    commitHeadline: Optional[String] = None
+    expectedHeadOid: Optional[GitObjectID] = None
+    mergeMethod: Optional[PullRequestMergeMethod] = 'MERGE'
+    pullRequestId: ID
+    typename__: Optional[Literal['EnablePullRequestAutoMergeInput']] = Field(
+        'EnablePullRequestAutoMergeInput', alias='__typename'
+    )
+
+
+class EnqueuePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of EnqueuePullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    expectedHeadOid: Optional[GitObjectID] = None
+    jump: Optional[Boolean] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['EnqueuePullRequestInput']] = Field(
+        'EnqueuePullRequestInput', alias='__typename'
+    )
+
+
+class EnterpriseAdministratorInvitationOrder(BaseModel):
+    """
+    Ordering options for enterprise administrator invitation connections
+    """
+
+    direction: OrderDirection
+    field: EnterpriseAdministratorInvitationOrderField
+    typename__: Optional[Literal['EnterpriseAdministratorInvitationOrder']] = Field(
+        'EnterpriseAdministratorInvitationOrder', alias='__typename'
+    )
+
+
+class EnterpriseMemberOrder(BaseModel):
+    """
+    Ordering options for enterprise member connections.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseMemberOrderField
+    typename__: Optional[Literal['EnterpriseMemberOrder']] = Field(
+        'EnterpriseMemberOrder', alias='__typename'
+    )
+
+
+class EnterpriseOrder(BaseModel):
+    """
+    Ordering options for enterprises.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseOrderField
+    typename__: Optional[Literal['EnterpriseOrder']] = Field(
+        'EnterpriseOrder', alias='__typename'
+    )
+
+
+class EnterpriseServerInstallationOrder(BaseModel):
+    """
+    Ordering options for Enterprise Server installation connections.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseServerInstallationOrderField
+    typename__: Optional[Literal['EnterpriseServerInstallationOrder']] = Field(
+        'EnterpriseServerInstallationOrder', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountEmailOrder(BaseModel):
+    """
+    Ordering options for Enterprise Server user account email connections.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseServerUserAccountEmailOrderField
+    typename__: Optional[Literal['EnterpriseServerUserAccountEmailOrder']] = Field(
+        'EnterpriseServerUserAccountEmailOrder', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountOrder(BaseModel):
+    """
+    Ordering options for Enterprise Server user account connections.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseServerUserAccountOrderField
+    typename__: Optional[Literal['EnterpriseServerUserAccountOrder']] = Field(
+        'EnterpriseServerUserAccountOrder', alias='__typename'
+    )
+
+
+class EnterpriseServerUserAccountsUploadOrder(BaseModel):
+    """
+    Ordering options for Enterprise Server user accounts upload connections.
+    """
+
+    direction: OrderDirection
+    field: EnterpriseServerUserAccountsUploadOrderField
+    typename__: Optional[Literal['EnterpriseServerUserAccountsUploadOrder']] = Field(
+        'EnterpriseServerUserAccountsUploadOrder', alias='__typename'
+    )
+
+
+class Environments(BaseModel):
+    """
+    Ordering options for environments
+    """
+
+    direction: OrderDirection
+    field: EnvironmentOrderField
+    typename__: Optional[Literal['Environments']] = Field(
+        'Environments', alias='__typename'
+    )
+
+
+class FileAddition(BaseModel):
+    """
+    A command to add a file at the given path with the given contents as part of a
+    commit.  Any existing file at that that path will be replaced.
+    """
+
+    contents: Base64String
+    path: String
+    typename__: Optional[Literal['FileAddition']] = Field(
+        'FileAddition', alias='__typename'
+    )
+
+
+class FileChanges(BaseModel):
+    """
+    A description of a set of changes to a file tree to be made as part of
+    a git commit, modeled as zero or more file `additions` and zero or more
+    file `deletions`.
+
+    Both fields are optional; omitting both will produce a commit with no
+    file changes.
+
+    `deletions` and `additions` describe changes to files identified
+    by their path in the git tree using unix-style path separators, i.e.
+    `/`.  The root of a git tree is an empty string, so paths are not
+    slash-prefixed.
+
+    `path` values must be unique across all `additions` and `deletions`
+    provided.  Any duplication will result in a validation error.
+
+    ### Encoding
+
+    File contents must be provided in full for each `FileAddition`.
+
+    The `contents` of a `FileAddition` must be encoded using RFC 4648
+    compliant base64, i.e. correct padding is required and no characters
+    outside the standard alphabet may be used.  Invalid base64
+    encoding will be rejected with a validation error.
+
+    The encoded contents may be binary.
+
+    For text files, no assumptions are made about the character encoding of
+    the file contents (after base64 decoding).  No charset transcoding or
+    line-ending normalization will be performed; it is the client's
+    responsibility to manage the character encoding of files they provide.
+    However, for maximum compatibility we recommend using UTF-8 encoding
+    and ensuring that all files in a repository use a consistent
+    line-ending convention (`\n` or `\r\n`), and that all files end
+    with a newline.
+
+    ### Modeling file changes
+
+    Each of the the five types of conceptual changes that can be made in a
+    git commit can be described using the `FileChanges` type as follows:
+
+    1. New file addition: create file `hello world\n` at path `docs/README.txt`:
+
+           {
+             "additions" [
+               {
+                 "path": "docs/README.txt",
+                 "contents": base64encode("hello world\n")
+               }
+             ]
+           }
+
+    2. Existing file modification: change existing `docs/README.txt` to have new
+       content `new content here\n`:
+
+           {
+             "additions" [
+               {
+                 "path": "docs/README.txt",
+                 "contents": base64encode("new content here\n")
+               }
+             ]
+           }
+
+    3. Existing file deletion: remove existing file `docs/README.txt`.
+       Note that the path is required to exist -- specifying a
+       path that does not exist on the given branch will abort the
+       commit and return an error.
+
+           {
+             "deletions" [
+               {
+                 "path": "docs/README.txt"
+               }
+             ]
+           }
+
+
+    4. File rename with no changes: rename `docs/README.txt` with
+       previous content `hello world\n` to the same content at
+       `newdocs/README.txt`:
+
+           {
+             "deletions" [
+               {
+                 "path": "docs/README.txt",
+               }
+             ],
+             "additions" [
+               {
+                 "path": "newdocs/README.txt",
+                 "contents": base64encode("hello world\n")
+               }
+             ]
+           }
+
+
+    5. File rename with changes: rename `docs/README.txt` with
+       previous content `hello world\n` to a file at path
+       `newdocs/README.txt` with content `new contents\n`:
+
+           {
+             "deletions" [
+               {
+                 "path": "docs/README.txt",
+               }
+             ],
+             "additions" [
+               {
+                 "path": "newdocs/README.txt",
+                 "contents": base64encode("new contents\n")
+               }
+             ]
+           }
+    """
+
+    additions: Optional[List[FileAddition]] = []
+    deletions: Optional[List[FileDeletion]] = []
+    typename__: Optional[Literal['FileChanges']] = Field(
+        'FileChanges', alias='__typename'
+    )
+
+
+class FileDeletion(BaseModel):
+    """
+    A command to delete the file at the given path as part of a commit.
+    """
+
+    path: String
+    typename__: Optional[Literal['FileDeletion']] = Field(
+        'FileDeletion', alias='__typename'
+    )
+
+
+class FollowOrganizationInput(BaseModel):
+    """
+    Autogenerated input type of FollowOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    typename__: Optional[Literal['FollowOrganizationInput']] = Field(
+        'FollowOrganizationInput', alias='__typename'
+    )
+
+
+class FollowUserInput(BaseModel):
+    """
+    Autogenerated input type of FollowUser
+    """
+
+    clientMutationId: Optional[String] = None
+    userId: ID
+    typename__: Optional[Literal['FollowUserInput']] = Field(
+        'FollowUserInput', alias='__typename'
+    )
+
+
+class GistOrder(BaseModel):
+    """
+    Ordering options for gist connections
+    """
+
+    direction: OrderDirection
+    field: GistOrderField
+    typename__: Optional[Literal['GistOrder']] = Field('GistOrder', alias='__typename')
+
+
+class GrantEnterpriseOrganizationsMigratorRoleInput(BaseModel):
+    """
+    Autogenerated input type of GrantEnterpriseOrganizationsMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    typename__: Optional[
+        Literal['GrantEnterpriseOrganizationsMigratorRoleInput']
+    ] = Field('GrantEnterpriseOrganizationsMigratorRoleInput', alias='__typename')
+
+
+class GrantMigratorRoleInput(BaseModel):
+    """
+    Autogenerated input type of GrantMigratorRole
+    """
+
+    actor: String
+    actorType: ActorType
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    typename__: Optional[Literal['GrantMigratorRoleInput']] = Field(
+        'GrantMigratorRoleInput', alias='__typename'
+    )
+
+
+class ImportProjectInput(BaseModel):
+    """
+    Autogenerated input type of ImportProject
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    columnImports: List[ProjectColumnImport]
+    name: String
+    ownerName: String
+    public: Optional[Boolean] = False
+    typename__: Optional[Literal['ImportProjectInput']] = Field(
+        'ImportProjectInput', alias='__typename'
+    )
+
+
+class InviteEnterpriseAdminInput(BaseModel):
+    """
+    Autogenerated input type of InviteEnterpriseAdmin
+    """
+
+    clientMutationId: Optional[String] = None
+    email: Optional[String] = None
+    enterpriseId: ID
+    invitee: Optional[String] = None
+    role: Optional[EnterpriseAdministratorRole] = None
+    typename__: Optional[Literal['InviteEnterpriseAdminInput']] = Field(
+        'InviteEnterpriseAdminInput', alias='__typename'
+    )
+
+
+class IpAllowListEntryOrder(BaseModel):
+    """
+    Ordering options for IP allow list entry connections.
+    """
+
+    direction: OrderDirection
+    field: IpAllowListEntryOrderField
+    typename__: Optional[Literal['IpAllowListEntryOrder']] = Field(
+        'IpAllowListEntryOrder', alias='__typename'
+    )
+
+
+class IssueCommentOrder(BaseModel):
+    """
+    Ways in which lists of issue comments can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: IssueCommentOrderField
+    typename__: Optional[Literal['IssueCommentOrder']] = Field(
+        'IssueCommentOrder', alias='__typename'
+    )
+
+
+class IssueFilters(BaseModel):
+    """
+    Ways in which to filter lists of issues.
+    """
+
+    assignee: Optional[String] = None
+    createdBy: Optional[String] = None
+    labels: Optional[List[String]] = None
+    mentioned: Optional[String] = None
+    milestone: Optional[String] = None
+    milestoneNumber: Optional[String] = None
+    since: Optional[DateTime] = None
+    states: Optional[List[IssueState]] = None
+    viewerSubscribed: Optional[Boolean] = False
+    typename__: Optional[Literal['IssueFilters']] = Field(
+        'IssueFilters', alias='__typename'
+    )
+
+
+class IssueOrder(BaseModel):
+    """
+    Ways in which lists of issues can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: IssueOrderField
+    typename__: Optional[Literal['IssueOrder']] = Field(
+        'IssueOrder', alias='__typename'
+    )
+
+
+class LabelOrder(BaseModel):
+    """
+    Ways in which lists of labels can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: LabelOrderField
+    typename__: Optional[Literal['LabelOrder']] = Field(
+        'LabelOrder', alias='__typename'
+    )
+
+
+class LanguageOrder(BaseModel):
+    """
+    Ordering options for language connections.
+    """
+
+    direction: OrderDirection
+    field: LanguageOrderField
+    typename__: Optional[Literal['LanguageOrder']] = Field(
+        'LanguageOrder', alias='__typename'
+    )
+
+
+class LinkProjectV2ToRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of LinkProjectV2ToRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    repositoryId: ID
+    typename__: Optional[Literal['LinkProjectV2ToRepositoryInput']] = Field(
+        'LinkProjectV2ToRepositoryInput', alias='__typename'
+    )
+
+
+class LinkProjectV2ToTeamInput(BaseModel):
+    """
+    Autogenerated input type of LinkProjectV2ToTeam
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    teamId: ID
+    typename__: Optional[Literal['LinkProjectV2ToTeamInput']] = Field(
+        'LinkProjectV2ToTeamInput', alias='__typename'
+    )
+
+
+class LinkRepositoryToProjectInput(BaseModel):
+    """
+    Autogenerated input type of LinkRepositoryToProject
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    repositoryId: ID
+    typename__: Optional[Literal['LinkRepositoryToProjectInput']] = Field(
+        'LinkRepositoryToProjectInput', alias='__typename'
+    )
+
+
+class LockLockableInput(BaseModel):
+    """
+    Autogenerated input type of LockLockable
+    """
+
+    clientMutationId: Optional[String] = None
+    lockReason: Optional[LockReason] = None
+    lockableId: ID
+    typename__: Optional[Literal['LockLockableInput']] = Field(
+        'LockLockableInput', alias='__typename'
+    )
+
+
+class MannequinOrder(BaseModel):
+    """
+    Ordering options for mannequins.
+    """
+
+    direction: OrderDirection
+    field: MannequinOrderField
+    typename__: Optional[Literal['MannequinOrder']] = Field(
+        'MannequinOrder', alias='__typename'
+    )
+
+
+class MarkDiscussionCommentAsAnswerInput(BaseModel):
+    """
+    Autogenerated input type of MarkDiscussionCommentAsAnswer
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['MarkDiscussionCommentAsAnswerInput']] = Field(
+        'MarkDiscussionCommentAsAnswerInput', alias='__typename'
+    )
+
+
+class MarkFileAsViewedInput(BaseModel):
+    """
+    Autogenerated input type of MarkFileAsViewed
+    """
+
+    clientMutationId: Optional[String] = None
+    path: String
+    pullRequestId: ID
+    typename__: Optional[Literal['MarkFileAsViewedInput']] = Field(
+        'MarkFileAsViewedInput', alias='__typename'
+    )
+
+
+class MarkProjectV2AsTemplateInput(BaseModel):
+    """
+    Autogenerated input type of MarkProjectV2AsTemplate
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    typename__: Optional[Literal['MarkProjectV2AsTemplateInput']] = Field(
+        'MarkProjectV2AsTemplateInput', alias='__typename'
+    )
+
+
+class MarkPullRequestReadyForReviewInput(BaseModel):
+    """
+    Autogenerated input type of MarkPullRequestReadyForReview
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['MarkPullRequestReadyForReviewInput']] = Field(
+        'MarkPullRequestReadyForReviewInput', alias='__typename'
+    )
+
+
+class MergeBranchInput(BaseModel):
+    """
+    Autogenerated input type of MergeBranch
+    """
+
+    authorEmail: Optional[String] = None
+    base: String
+    clientMutationId: Optional[String] = None
+    commitMessage: Optional[String] = None
+    head: String
+    repositoryId: ID
+    typename__: Optional[Literal['MergeBranchInput']] = Field(
+        'MergeBranchInput', alias='__typename'
+    )
+
+
+class MergePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of MergePullRequest
+    """
+
+    authorEmail: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    commitBody: Optional[String] = None
+    commitHeadline: Optional[String] = None
+    expectedHeadOid: Optional[GitObjectID] = None
+    mergeMethod: Optional[PullRequestMergeMethod] = 'MERGE'
+    pullRequestId: ID
+    typename__: Optional[Literal['MergePullRequestInput']] = Field(
+        'MergePullRequestInput', alias='__typename'
+    )
+
+
+class MilestoneOrder(BaseModel):
+    """
+    Ordering options for milestone connections.
+    """
+
+    direction: OrderDirection
+    field: MilestoneOrderField
+    typename__: Optional[Literal['MilestoneOrder']] = Field(
+        'MilestoneOrder', alias='__typename'
+    )
+
+
+class MinimizeCommentInput(BaseModel):
+    """
+    Autogenerated input type of MinimizeComment
+    """
+
+    classifier: ReportedContentClassifiers
+    clientMutationId: Optional[String] = None
+    subjectId: ID
+    typename__: Optional[Literal['MinimizeCommentInput']] = Field(
+        'MinimizeCommentInput', alias='__typename'
+    )
+
+
+class MoveProjectCardInput(BaseModel):
+    """
+    Autogenerated input type of MoveProjectCard
+    """
+
+    afterCardId: Optional[ID] = None
+    cardId: ID
+    clientMutationId: Optional[String] = None
+    columnId: ID
+    typename__: Optional[Literal['MoveProjectCardInput']] = Field(
+        'MoveProjectCardInput', alias='__typename'
+    )
+
+
+class MoveProjectColumnInput(BaseModel):
+    """
+    Autogenerated input type of MoveProjectColumn
+    """
+
+    afterColumnId: Optional[ID] = None
+    clientMutationId: Optional[String] = None
+    columnId: ID
+    typename__: Optional[Literal['MoveProjectColumnInput']] = Field(
+        'MoveProjectColumnInput', alias='__typename'
+    )
+
+
+class OrgEnterpriseOwnerOrder(BaseModel):
+    """
+    Ordering options for an organization's enterprise owner connections.
+    """
+
+    direction: OrderDirection
+    field: OrgEnterpriseOwnerOrderField
+    typename__: Optional[Literal['OrgEnterpriseOwnerOrder']] = Field(
+        'OrgEnterpriseOwnerOrder', alias='__typename'
+    )
+
+
+class OrganizationOrder(BaseModel):
+    """
+    Ordering options for organization connections.
+    """
+
+    direction: OrderDirection
+    field: OrganizationOrderField
+    typename__: Optional[Literal['OrganizationOrder']] = Field(
+        'OrganizationOrder', alias='__typename'
+    )
+
+
+class PackageFileOrder(BaseModel):
+    """
+    Ways in which lists of package files can be ordered upon return.
+    """
+
+    direction: Optional[OrderDirection] = None
+    field: Optional[PackageFileOrderField] = None
+    typename__: Optional[Literal['PackageFileOrder']] = Field(
+        'PackageFileOrder', alias='__typename'
+    )
+
+
+class PackageOrder(BaseModel):
+    """
+    Ways in which lists of packages can be ordered upon return.
+    """
+
+    direction: Optional[OrderDirection] = None
+    field: Optional[PackageOrderField] = None
+    typename__: Optional[Literal['PackageOrder']] = Field(
+        'PackageOrder', alias='__typename'
+    )
+
+
+class PackageVersionOrder(BaseModel):
+    """
+    Ways in which lists of package versions can be ordered upon return.
+    """
+
+    direction: Optional[OrderDirection] = None
+    field: Optional[PackageVersionOrderField] = None
+    typename__: Optional[Literal['PackageVersionOrder']] = Field(
+        'PackageVersionOrder', alias='__typename'
+    )
+
+
+class PinIssueInput(BaseModel):
+    """
+    Autogenerated input type of PinIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    typename__: Optional[Literal['PinIssueInput']] = Field(
+        'PinIssueInput', alias='__typename'
+    )
+
+
+class ProjectCardImport(BaseModel):
+    """
+    An issue or PR and its owning repository to be used in a project card.
+    """
+
+    number: Int
+    repository: String
+    typename__: Optional[Literal['ProjectCardImport']] = Field(
+        'ProjectCardImport', alias='__typename'
+    )
+
+
+class ProjectColumnImport(BaseModel):
+    """
+    A project column and a list of its issues and PRs.
+    """
+
+    columnName: String
+    issues: Optional[List[ProjectCardImport]] = None
+    position: Int
+    typename__: Optional[Literal['ProjectColumnImport']] = Field(
+        'ProjectColumnImport', alias='__typename'
+    )
+
+
+class ProjectOrder(BaseModel):
+    """
+    Ways in which lists of projects can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: ProjectOrderField
+    typename__: Optional[Literal['ProjectOrder']] = Field(
+        'ProjectOrder', alias='__typename'
+    )
+
+
+class ProjectV2Collaborator(BaseModel):
+    """
+    A collaborator to update on a project. Only one of the userId or teamId should be provided.
+    """
+
+    role: ProjectV2Roles
+    teamId: Optional[ID] = None
+    userId: Optional[ID] = None
+    typename__: Optional[Literal['ProjectV2Collaborator']] = Field(
+        'ProjectV2Collaborator', alias='__typename'
+    )
+
+
+class ProjectV2FieldOrder(BaseModel):
+    """
+    Ordering options for project v2 field connections
+    """
+
+    direction: OrderDirection
+    field: ProjectV2FieldOrderField
+    typename__: Optional[Literal['ProjectV2FieldOrder']] = Field(
+        'ProjectV2FieldOrder', alias='__typename'
+    )
+
+
+class ProjectV2FieldValue(BaseModel):
+    """
+    The values that can be used to update a field of an item inside a Project. Only 1 value can be updated at a time.
+    """
+
+    date: Optional[Date] = None
+    iterationId: Optional[String] = None
+    number: Optional[Float] = None
+    singleSelectOptionId: Optional[String] = None
+    text: Optional[String] = None
+    typename__: Optional[Literal['ProjectV2FieldValue']] = Field(
+        'ProjectV2FieldValue', alias='__typename'
+    )
+
+
+class ProjectV2Filters(BaseModel):
+    """
+    Ways in which to filter lists of projects.
+    """
+
+    state: Optional[ProjectV2State] = None
+    typename__: Optional[Literal['ProjectV2Filters']] = Field(
+        'ProjectV2Filters', alias='__typename'
+    )
+
+
+class ProjectV2ItemFieldValueOrder(BaseModel):
+    """
+    Ordering options for project v2 item field value connections
+    """
+
+    direction: OrderDirection
+    field: ProjectV2ItemFieldValueOrderField
+    typename__: Optional[Literal['ProjectV2ItemFieldValueOrder']] = Field(
+        'ProjectV2ItemFieldValueOrder', alias='__typename'
+    )
+
+
+class ProjectV2ItemOrder(BaseModel):
+    """
+    Ordering options for project v2 item connections
+    """
+
+    direction: OrderDirection
+    field: ProjectV2ItemOrderField
+    typename__: Optional[Literal['ProjectV2ItemOrder']] = Field(
+        'ProjectV2ItemOrder', alias='__typename'
+    )
+
+
+class ProjectV2Order(BaseModel):
+    """
+    Ways in which lists of projects can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: ProjectV2OrderField
+    typename__: Optional[Literal['ProjectV2Order']] = Field(
+        'ProjectV2Order', alias='__typename'
+    )
+
+
+class ProjectV2SingleSelectFieldOptionInput(BaseModel):
+    """
+    Represents a single select field option
+    """
+
+    color: ProjectV2SingleSelectFieldOptionColor
+    description: String
+    name: String
+    typename__: Optional[Literal['ProjectV2SingleSelectFieldOptionInput']] = Field(
+        'ProjectV2SingleSelectFieldOptionInput', alias='__typename'
+    )
+
+
+class ProjectV2ViewOrder(BaseModel):
+    """
+    Ordering options for project v2 view connections
+    """
+
+    direction: OrderDirection
+    field: ProjectV2ViewOrderField
+    typename__: Optional[Literal['ProjectV2ViewOrder']] = Field(
+        'ProjectV2ViewOrder', alias='__typename'
+    )
+
+
+class ProjectV2WorkflowOrder(BaseModel):
+    """
+    Ordering options for project v2 workflows connections
+    """
+
+    direction: OrderDirection
+    field: ProjectV2WorkflowsOrderField
+    typename__: Optional[Literal['ProjectV2WorkflowOrder']] = Field(
+        'ProjectV2WorkflowOrder', alias='__typename'
+    )
+
+
+class PublishSponsorsTierInput(BaseModel):
+    """
+    Autogenerated input type of PublishSponsorsTier
+    """
+
+    clientMutationId: Optional[String] = None
+    tierId: ID
+    typename__: Optional[Literal['PublishSponsorsTierInput']] = Field(
+        'PublishSponsorsTierInput', alias='__typename'
+    )
+
+
+class PullRequestOrder(BaseModel):
+    """
+    Ways in which lists of issues can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: PullRequestOrderField
+    typename__: Optional[Literal['PullRequestOrder']] = Field(
+        'PullRequestOrder', alias='__typename'
+    )
+
+
+class PullRequestParametersInput(BaseModel):
+    """
+    Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+    """
+
+    dismissStaleReviewsOnPush: Boolean
+    requireCodeOwnerReview: Boolean
+    requireLastPushApproval: Boolean
+    requiredApprovingReviewCount: Int
+    requiredReviewThreadResolution: Boolean
+    typename__: Optional[Literal['PullRequestParametersInput']] = Field(
+        'PullRequestParametersInput', alias='__typename'
+    )
+
+
+class ReactionOrder(BaseModel):
+    """
+    Ways in which lists of reactions can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: ReactionOrderField
+    typename__: Optional[Literal['ReactionOrder']] = Field(
+        'ReactionOrder', alias='__typename'
+    )
+
+
+class RefNameConditionTargetInput(BaseModel):
+    """
+    Parameters to be used for the ref_name condition
+    """
+
+    exclude: List[String]
+    include: List[String]
+    typename__: Optional[Literal['RefNameConditionTargetInput']] = Field(
+        'RefNameConditionTargetInput', alias='__typename'
+    )
+
+
+class RefOrder(BaseModel):
+    """
+    Ways in which lists of git refs can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: RefOrderField
+    typename__: Optional[Literal['RefOrder']] = Field('RefOrder', alias='__typename')
+
+
+class RefUpdate(BaseModel):
+    """
+    A ref update
+    """
+
+    afterOid: GitObjectID
+    beforeOid: Optional[GitObjectID] = None
+    force: Optional[Boolean] = False
+    name: GitRefname
+    typename__: Optional[Literal['RefUpdate']] = Field('RefUpdate', alias='__typename')
+
+
+class RegenerateEnterpriseIdentityProviderRecoveryCodesInput(BaseModel):
+    """
+    Autogenerated input type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    typename__: Optional[
+        Literal['RegenerateEnterpriseIdentityProviderRecoveryCodesInput']
+    ] = Field(
+        'RegenerateEnterpriseIdentityProviderRecoveryCodesInput', alias='__typename'
+    )
+
+
+class RegenerateVerifiableDomainTokenInput(BaseModel):
+    """
+    Autogenerated input type of RegenerateVerifiableDomainToken
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['RegenerateVerifiableDomainTokenInput']] = Field(
+        'RegenerateVerifiableDomainTokenInput', alias='__typename'
+    )
+
+
+class RejectDeploymentsInput(BaseModel):
+    """
+    Autogenerated input type of RejectDeployments
+    """
+
+    clientMutationId: Optional[String] = None
+    comment: Optional[String] = ''
+    environmentIds: List[ID]
+    workflowRunId: ID
+    typename__: Optional[Literal['RejectDeploymentsInput']] = Field(
+        'RejectDeploymentsInput', alias='__typename'
+    )
+
+
+class ReleaseOrder(BaseModel):
+    """
+    Ways in which lists of releases can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: ReleaseOrderField
+    typename__: Optional[Literal['ReleaseOrder']] = Field(
+        'ReleaseOrder', alias='__typename'
+    )
+
+
+class RemoveAssigneesFromAssignableInput(BaseModel):
+    """
+    Autogenerated input type of RemoveAssigneesFromAssignable
+    """
+
+    assignableId: ID
+    assigneeIds: List[ID]
+    clientMutationId: Optional[String] = None
+    typename__: Optional[Literal['RemoveAssigneesFromAssignableInput']] = Field(
+        'RemoveAssigneesFromAssignableInput', alias='__typename'
+    )
+
+
+class RemoveEnterpriseAdminInput(BaseModel):
+    """
+    Autogenerated input type of RemoveEnterpriseAdmin
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    typename__: Optional[Literal['RemoveEnterpriseAdminInput']] = Field(
+        'RemoveEnterpriseAdminInput', alias='__typename'
+    )
+
+
+class RemoveEnterpriseIdentityProviderInput(BaseModel):
+    """
+    Autogenerated input type of RemoveEnterpriseIdentityProvider
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    typename__: Optional[Literal['RemoveEnterpriseIdentityProviderInput']] = Field(
+        'RemoveEnterpriseIdentityProviderInput', alias='__typename'
+    )
+
+
+class RemoveEnterpriseMemberInput(BaseModel):
+    """
+    Autogenerated input type of RemoveEnterpriseMember
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    userId: ID
+    typename__: Optional[Literal['RemoveEnterpriseMemberInput']] = Field(
+        'RemoveEnterpriseMemberInput', alias='__typename'
+    )
+
+
+class RemoveEnterpriseOrganizationInput(BaseModel):
+    """
+    Autogenerated input type of RemoveEnterpriseOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    organizationId: ID
+    typename__: Optional[Literal['RemoveEnterpriseOrganizationInput']] = Field(
+        'RemoveEnterpriseOrganizationInput', alias='__typename'
+    )
+
+
+class RemoveEnterpriseSupportEntitlementInput(BaseModel):
+    """
+    Autogenerated input type of RemoveEnterpriseSupportEntitlement
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    typename__: Optional[Literal['RemoveEnterpriseSupportEntitlementInput']] = Field(
+        'RemoveEnterpriseSupportEntitlementInput', alias='__typename'
+    )
+
+
+class RemoveLabelsFromLabelableInput(BaseModel):
+    """
+    Autogenerated input type of RemoveLabelsFromLabelable
+    """
+
+    clientMutationId: Optional[String] = None
+    labelIds: List[ID]
+    labelableId: ID
+    typename__: Optional[Literal['RemoveLabelsFromLabelableInput']] = Field(
+        'RemoveLabelsFromLabelableInput', alias='__typename'
+    )
+
+
+class RemoveOutsideCollaboratorInput(BaseModel):
+    """
+    Autogenerated input type of RemoveOutsideCollaborator
+    """
+
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    userId: ID
+    typename__: Optional[Literal['RemoveOutsideCollaboratorInput']] = Field(
+        'RemoveOutsideCollaboratorInput', alias='__typename'
+    )
+
+
+class RemoveReactionInput(BaseModel):
+    """
+    Autogenerated input type of RemoveReaction
+    """
+
+    clientMutationId: Optional[String] = None
+    content: ReactionContent
+    subjectId: ID
+    typename__: Optional[Literal['RemoveReactionInput']] = Field(
+        'RemoveReactionInput', alias='__typename'
+    )
+
+
+class RemoveStarInput(BaseModel):
+    """
+    Autogenerated input type of RemoveStar
+    """
+
+    clientMutationId: Optional[String] = None
+    starrableId: ID
+    typename__: Optional[Literal['RemoveStarInput']] = Field(
+        'RemoveStarInput', alias='__typename'
+    )
+
+
+class RemoveUpvoteInput(BaseModel):
+    """
+    Autogenerated input type of RemoveUpvote
+    """
+
+    clientMutationId: Optional[String] = None
+    subjectId: ID
+    typename__: Optional[Literal['RemoveUpvoteInput']] = Field(
+        'RemoveUpvoteInput', alias='__typename'
+    )
+
+
+class ReopenDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of ReopenDiscussion
+    """
+
+    clientMutationId: Optional[String] = None
+    discussionId: ID
+    typename__: Optional[Literal['ReopenDiscussionInput']] = Field(
+        'ReopenDiscussionInput', alias='__typename'
+    )
+
+
+class ReopenIssueInput(BaseModel):
+    """
+    Autogenerated input type of ReopenIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    typename__: Optional[Literal['ReopenIssueInput']] = Field(
+        'ReopenIssueInput', alias='__typename'
+    )
+
+
+class ReopenPullRequestInput(BaseModel):
+    """
+    Autogenerated input type of ReopenPullRequest
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    typename__: Optional[Literal['ReopenPullRequestInput']] = Field(
+        'ReopenPullRequestInput', alias='__typename'
+    )
+
+
+class RepositoryIdConditionTargetInput(BaseModel):
+    """
+    Parameters to be used for the repository_id condition
+    """
+
+    repositoryIds: List[ID]
+    typename__: Optional[Literal['RepositoryIdConditionTargetInput']] = Field(
+        'RepositoryIdConditionTargetInput', alias='__typename'
+    )
+
+
+class RepositoryInvitationOrder(BaseModel):
+    """
+    Ordering options for repository invitation connections.
+    """
+
+    direction: OrderDirection
+    field: RepositoryInvitationOrderField
+    typename__: Optional[Literal['RepositoryInvitationOrder']] = Field(
+        'RepositoryInvitationOrder', alias='__typename'
+    )
+
+
+class RepositoryMigrationOrder(BaseModel):
+    """
+    Ordering options for repository migrations.
+    """
+
+    direction: RepositoryMigrationOrderDirection
+    field: RepositoryMigrationOrderField
+    typename__: Optional[Literal['RepositoryMigrationOrder']] = Field(
+        'RepositoryMigrationOrder', alias='__typename'
+    )
+
+
+class RepositoryNameConditionTargetInput(BaseModel):
+    """
+    Parameters to be used for the repository_name condition
+    """
+
+    exclude: List[String]
+    include: List[String]
+    protected: Optional[Boolean] = None
+    typename__: Optional[Literal['RepositoryNameConditionTargetInput']] = Field(
+        'RepositoryNameConditionTargetInput', alias='__typename'
+    )
+
+
+class RepositoryOrder(BaseModel):
+    """
+    Ordering options for repository connections
+    """
+
+    direction: OrderDirection
+    field: RepositoryOrderField
+    typename__: Optional[Literal['RepositoryOrder']] = Field(
+        'RepositoryOrder', alias='__typename'
+    )
+
+
+class RepositoryRuleConditionsInput(BaseModel):
+    """
+    Specifies the conditions required for a ruleset to evaluate
+    """
+
+    refName: Optional[RefNameConditionTargetInput] = None
+    repositoryId: Optional[RepositoryIdConditionTargetInput] = None
+    repositoryName: Optional[RepositoryNameConditionTargetInput] = None
+    typename__: Optional[Literal['RepositoryRuleConditionsInput']] = Field(
+        'RepositoryRuleConditionsInput', alias='__typename'
+    )
+
+
+class RepositoryRuleInput(BaseModel):
+    """
+    Specifies the attributes for a new or updated rule.
+    """
+
+    id: Optional[ID] = None
+    parameters: Optional[RuleParametersInput] = None
+    type: RepositoryRuleType
+    typename__: Optional[Literal['RepositoryRuleInput']] = Field(
+        'RepositoryRuleInput', alias='__typename'
+    )
+
+
+class RepositoryRulesetBypassActorInput(BaseModel):
+    """
+    Specifies the attributes for a new or updated ruleset bypass actor. Only one of
+    `actor_id`, `repository_role_database_id`, or `organization_admin` should be specified.
+    """
+
+    actorId: Optional[ID] = None
+    bypassMode: RepositoryRulesetBypassActorBypassMode
+    organizationAdmin: Optional[Boolean] = None
+    repositoryRoleDatabaseId: Optional[Int] = None
+    typename__: Optional[Literal['RepositoryRulesetBypassActorInput']] = Field(
+        'RepositoryRulesetBypassActorInput', alias='__typename'
+    )
+
+
+class RequestReviewsInput(BaseModel):
+    """
+    Autogenerated input type of RequestReviews
+    """
+
+    clientMutationId: Optional[String] = None
+    pullRequestId: ID
+    teamIds: Optional[List[ID]] = None
+    union: Optional[Boolean] = False
+    userIds: Optional[List[ID]] = None
+    typename__: Optional[Literal['RequestReviewsInput']] = Field(
+        'RequestReviewsInput', alias='__typename'
+    )
+
+
+class RequiredDeploymentsParametersInput(BaseModel):
+    """
+    Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+    """
+
+    requiredDeploymentEnvironments: List[String]
+    typename__: Optional[Literal['RequiredDeploymentsParametersInput']] = Field(
+        'RequiredDeploymentsParametersInput', alias='__typename'
+    )
+
+
+class RequiredStatusCheckInput(BaseModel):
+    """
+    Specifies the attributes for a new or updated required status check.
+    """
+
+    appId: Optional[ID] = None
+    context: String
+    typename__: Optional[Literal['RequiredStatusCheckInput']] = Field(
+        'RequiredStatusCheckInput', alias='__typename'
+    )
+
+
+class RequiredStatusChecksParametersInput(BaseModel):
+    """
+    Choose which status checks must pass before the ref is updated. When enabled,
+    commits must first be pushed to another ref where the checks pass.
+    """
+
+    requiredStatusChecks: List[StatusCheckConfigurationInput]
+    strictRequiredStatusChecksPolicy: Boolean
+    typename__: Optional[Literal['RequiredStatusChecksParametersInput']] = Field(
+        'RequiredStatusChecksParametersInput', alias='__typename'
+    )
+
+
+class RerequestCheckSuiteInput(BaseModel):
+    """
+    Autogenerated input type of RerequestCheckSuite
+    """
+
+    checkSuiteId: ID
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    typename__: Optional[Literal['RerequestCheckSuiteInput']] = Field(
+        'RerequestCheckSuiteInput', alias='__typename'
+    )
+
+
+class ResolveReviewThreadInput(BaseModel):
+    """
+    Autogenerated input type of ResolveReviewThread
+    """
+
+    clientMutationId: Optional[String] = None
+    threadId: ID
+    typename__: Optional[Literal['ResolveReviewThreadInput']] = Field(
+        'ResolveReviewThreadInput', alias='__typename'
+    )
+
+
+class RetireSponsorsTierInput(BaseModel):
+    """
+    Autogenerated input type of RetireSponsorsTier
+    """
+
+    clientMutationId: Optional[String] = None
+    tierId: ID
+    typename__: Optional[Literal['RetireSponsorsTierInput']] = Field(
+        'RetireSponsorsTierInput', alias='__typename'
+    )
+
+
+class RevertPullRequestInput(BaseModel):
+    """
+    Autogenerated input type of RevertPullRequest
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    draft: Optional[Boolean] = False
+    pullRequestId: ID
+    title: Optional[String] = None
+    typename__: Optional[Literal['RevertPullRequestInput']] = Field(
+        'RevertPullRequestInput', alias='__typename'
+    )
+
+
+class RevokeEnterpriseOrganizationsMigratorRoleInput(BaseModel):
+    """
+    Autogenerated input type of RevokeEnterpriseOrganizationsMigratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    typename__: Optional[
+        Literal['RevokeEnterpriseOrganizationsMigratorRoleInput']
+    ] = Field('RevokeEnterpriseOrganizationsMigratorRoleInput', alias='__typename')
+
+
+class RevokeMigratorRoleInput(BaseModel):
+    """
+    Autogenerated input type of RevokeMigratorRole
+    """
+
+    actor: String
+    actorType: ActorType
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    typename__: Optional[Literal['RevokeMigratorRoleInput']] = Field(
+        'RevokeMigratorRoleInput', alias='__typename'
+    )
+
+
+class RuleParametersInput(BaseModel):
+    """
+    Specifies the parameters for a `RepositoryRule` object. Only one of the fields should be specified.
+    """
+
+    branchNamePattern: Optional[BranchNamePatternParametersInput] = None
+    commitAuthorEmailPattern: Optional[CommitAuthorEmailPatternParametersInput] = None
+    commitMessagePattern: Optional[CommitMessagePatternParametersInput] = None
+    committerEmailPattern: Optional[CommitterEmailPatternParametersInput] = None
+    pullRequest: Optional[PullRequestParametersInput] = None
+    requiredDeployments: Optional[RequiredDeploymentsParametersInput] = None
+    requiredStatusChecks: Optional[RequiredStatusChecksParametersInput] = None
+    tagNamePattern: Optional[TagNamePatternParametersInput] = None
+    update: Optional[UpdateParametersInput] = None
+    workflows: Optional[WorkflowsParametersInput] = None
+    typename__: Optional[Literal['RuleParametersInput']] = Field(
+        'RuleParametersInput', alias='__typename'
+    )
+
+
+class SavedReplyOrder(BaseModel):
+    """
+    Ordering options for saved reply connections.
+    """
+
+    direction: OrderDirection
+    field: SavedReplyOrderField
+    typename__: Optional[Literal['SavedReplyOrder']] = Field(
+        'SavedReplyOrder', alias='__typename'
+    )
+
+
+class SecurityAdvisoryIdentifierFilter(BaseModel):
+    """
+    An advisory identifier to filter results on.
+    """
+
+    type: SecurityAdvisoryIdentifierType
+    value: String
+    typename__: Optional[Literal['SecurityAdvisoryIdentifierFilter']] = Field(
+        'SecurityAdvisoryIdentifierFilter', alias='__typename'
+    )
+
+
+class SecurityAdvisoryOrder(BaseModel):
+    """
+    Ordering options for security advisory connections
+    """
+
+    direction: OrderDirection
+    field: SecurityAdvisoryOrderField
+    typename__: Optional[Literal['SecurityAdvisoryOrder']] = Field(
+        'SecurityAdvisoryOrder', alias='__typename'
+    )
+
+
+class SecurityVulnerabilityOrder(BaseModel):
+    """
+    Ordering options for security vulnerability connections
+    """
+
+    direction: OrderDirection
+    field: SecurityVulnerabilityOrderField
+    typename__: Optional[Literal['SecurityVulnerabilityOrder']] = Field(
+        'SecurityVulnerabilityOrder', alias='__typename'
+    )
+
+
+class SetEnterpriseIdentityProviderInput(BaseModel):
+    """
+    Autogenerated input type of SetEnterpriseIdentityProvider
+    """
+
+    clientMutationId: Optional[String] = None
+    digestMethod: SamlDigestAlgorithm
+    enterpriseId: ID
+    idpCertificate: String
+    issuer: Optional[String] = None
+    signatureMethod: SamlSignatureAlgorithm
+    ssoUrl: URI
+    typename__: Optional[Literal['SetEnterpriseIdentityProviderInput']] = Field(
+        'SetEnterpriseIdentityProviderInput', alias='__typename'
+    )
+
+
+class SetOrganizationInteractionLimitInput(BaseModel):
+    """
+    Autogenerated input type of SetOrganizationInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    expiry: Optional[RepositoryInteractionLimitExpiry] = None
+    limit: RepositoryInteractionLimit
+    organizationId: ID
+    typename__: Optional[Literal['SetOrganizationInteractionLimitInput']] = Field(
+        'SetOrganizationInteractionLimitInput', alias='__typename'
+    )
+
+
+class SetRepositoryInteractionLimitInput(BaseModel):
+    """
+    Autogenerated input type of SetRepositoryInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    expiry: Optional[RepositoryInteractionLimitExpiry] = None
+    limit: RepositoryInteractionLimit
+    repositoryId: ID
+    typename__: Optional[Literal['SetRepositoryInteractionLimitInput']] = Field(
+        'SetRepositoryInteractionLimitInput', alias='__typename'
+    )
+
+
+class SetUserInteractionLimitInput(BaseModel):
+    """
+    Autogenerated input type of SetUserInteractionLimit
+    """
+
+    clientMutationId: Optional[String] = None
+    expiry: Optional[RepositoryInteractionLimitExpiry] = None
+    limit: RepositoryInteractionLimit
+    userId: ID
+    typename__: Optional[Literal['SetUserInteractionLimitInput']] = Field(
+        'SetUserInteractionLimitInput', alias='__typename'
+    )
+
+
+class SponsorOrder(BaseModel):
+    """
+    Ordering options for connections to get sponsor entities for GitHub Sponsors.
+    """
+
+    direction: OrderDirection
+    field: SponsorOrderField
+    typename__: Optional[Literal['SponsorOrder']] = Field(
+        'SponsorOrder', alias='__typename'
+    )
+
+
+class SponsorableOrder(BaseModel):
+    """
+    Ordering options for connections to get sponsorable entities for GitHub Sponsors.
+    """
+
+    direction: OrderDirection
+    field: SponsorableOrderField
+    typename__: Optional[Literal['SponsorableOrder']] = Field(
+        'SponsorableOrder', alias='__typename'
+    )
+
+
+class SponsorsActivityOrder(BaseModel):
+    """
+    Ordering options for GitHub Sponsors activity connections.
+    """
+
+    direction: OrderDirection
+    field: SponsorsActivityOrderField
+    typename__: Optional[Literal['SponsorsActivityOrder']] = Field(
+        'SponsorsActivityOrder', alias='__typename'
+    )
+
+
+class SponsorsTierOrder(BaseModel):
+    """
+    Ordering options for Sponsors tiers connections.
+    """
+
+    direction: OrderDirection
+    field: SponsorsTierOrderField
+    typename__: Optional[Literal['SponsorsTierOrder']] = Field(
+        'SponsorsTierOrder', alias='__typename'
+    )
+
+
+class SponsorshipNewsletterOrder(BaseModel):
+    """
+    Ordering options for sponsorship newsletter connections.
+    """
+
+    direction: OrderDirection
+    field: SponsorshipNewsletterOrderField
+    typename__: Optional[Literal['SponsorshipNewsletterOrder']] = Field(
+        'SponsorshipNewsletterOrder', alias='__typename'
+    )
+
+
+class SponsorshipOrder(BaseModel):
+    """
+    Ordering options for sponsorship connections.
+    """
+
+    direction: OrderDirection
+    field: SponsorshipOrderField
+    typename__: Optional[Literal['SponsorshipOrder']] = Field(
+        'SponsorshipOrder', alias='__typename'
+    )
+
+
+class StarOrder(BaseModel):
+    """
+    Ways in which star connections can be ordered.
+    """
+
+    direction: OrderDirection
+    field: StarOrderField
+    typename__: Optional[Literal['StarOrder']] = Field('StarOrder', alias='__typename')
+
+
+class StartOrganizationMigrationInput(BaseModel):
+    """
+    Autogenerated input type of StartOrganizationMigration
+    """
+
+    clientMutationId: Optional[String] = None
+    sourceAccessToken: String
+    sourceOrgUrl: URI
+    targetEnterpriseId: ID
+    targetOrgName: String
+    typename__: Optional[Literal['StartOrganizationMigrationInput']] = Field(
+        'StartOrganizationMigrationInput', alias='__typename'
+    )
+
+
+class StartRepositoryMigrationInput(BaseModel):
+    """
+    Autogenerated input type of StartRepositoryMigration
+    """
+
+    accessToken: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    continueOnError: Optional[Boolean] = None
+    gitArchiveUrl: Optional[String] = None
+    githubPat: Optional[String] = None
+    lockSource: Optional[Boolean] = None
+    metadataArchiveUrl: Optional[String] = None
+    ownerId: ID
+    repositoryName: String
+    skipReleases: Optional[Boolean] = None
+    sourceId: ID
+    sourceRepositoryUrl: Optional[URI] = None
+    targetRepoVisibility: Optional[String] = None
+    typename__: Optional[Literal['StartRepositoryMigrationInput']] = Field(
+        'StartRepositoryMigrationInput', alias='__typename'
+    )
+
+
+class StatusCheckConfigurationInput(BaseModel):
+    """
+    Required status check
+    """
+
+    context: String
+    integrationId: Optional[Int] = None
+    typename__: Optional[Literal['StatusCheckConfigurationInput']] = Field(
+        'StatusCheckConfigurationInput', alias='__typename'
+    )
+
+
+class SubmitPullRequestReviewInput(BaseModel):
+    """
+    Autogenerated input type of SubmitPullRequestReview
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    event: PullRequestReviewEvent
+    pullRequestId: Optional[ID] = None
+    pullRequestReviewId: Optional[ID] = None
+    typename__: Optional[Literal['SubmitPullRequestReviewInput']] = Field(
+        'SubmitPullRequestReviewInput', alias='__typename'
+    )
+
+
+class TagNamePatternParametersInput(BaseModel):
+    """
+    Parameters to be used for the tag_name_pattern rule
+    """
+
+    name: Optional[String] = None
+    negate: Optional[Boolean] = None
+    operator: String
+    pattern: String
+    typename__: Optional[Literal['TagNamePatternParametersInput']] = Field(
+        'TagNamePatternParametersInput', alias='__typename'
+    )
+
+
+class TeamDiscussionCommentOrder(BaseModel):
+    """
+    Ways in which team discussion comment connections can be ordered.
+    """
+
+    direction: OrderDirection
+    field: TeamDiscussionCommentOrderField
+    typename__: Optional[Literal['TeamDiscussionCommentOrder']] = Field(
+        'TeamDiscussionCommentOrder', alias='__typename'
+    )
+
+
+class TeamDiscussionOrder(BaseModel):
+    """
+    Ways in which team discussion connections can be ordered.
+    """
+
+    direction: OrderDirection
+    field: TeamDiscussionOrderField
+    typename__: Optional[Literal['TeamDiscussionOrder']] = Field(
+        'TeamDiscussionOrder', alias='__typename'
+    )
+
+
+class TeamMemberOrder(BaseModel):
+    """
+    Ordering options for team member connections
+    """
+
+    direction: OrderDirection
+    field: TeamMemberOrderField
+    typename__: Optional[Literal['TeamMemberOrder']] = Field(
+        'TeamMemberOrder', alias='__typename'
+    )
+
+
+class TeamOrder(BaseModel):
+    """
+    Ways in which team connections can be ordered.
+    """
+
+    direction: OrderDirection
+    field: TeamOrderField
+    typename__: Optional[Literal['TeamOrder']] = Field('TeamOrder', alias='__typename')
+
+
+class TeamRepositoryOrder(BaseModel):
+    """
+    Ordering options for team repository connections
+    """
+
+    direction: OrderDirection
+    field: TeamRepositoryOrderField
+    typename__: Optional[Literal['TeamRepositoryOrder']] = Field(
+        'TeamRepositoryOrder', alias='__typename'
+    )
+
+
+class TransferEnterpriseOrganizationInput(BaseModel):
+    """
+    Autogenerated input type of TransferEnterpriseOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    destinationEnterpriseId: ID
+    organizationId: ID
+    typename__: Optional[Literal['TransferEnterpriseOrganizationInput']] = Field(
+        'TransferEnterpriseOrganizationInput', alias='__typename'
+    )
+
+
+class TransferIssueInput(BaseModel):
+    """
+    Autogenerated input type of TransferIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    createLabelsIfMissing: Optional[Boolean] = False
+    issueId: ID
+    repositoryId: ID
+    typename__: Optional[Literal['TransferIssueInput']] = Field(
+        'TransferIssueInput', alias='__typename'
+    )
+
+
+class UnarchiveProjectV2ItemInput(BaseModel):
+    """
+    Autogenerated input type of UnarchiveProjectV2Item
+    """
+
+    clientMutationId: Optional[String] = None
+    itemId: ID
+    projectId: ID
+    typename__: Optional[Literal['UnarchiveProjectV2ItemInput']] = Field(
+        'UnarchiveProjectV2ItemInput', alias='__typename'
+    )
+
+
+class UnarchiveRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of UnarchiveRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    typename__: Optional[Literal['UnarchiveRepositoryInput']] = Field(
+        'UnarchiveRepositoryInput', alias='__typename'
+    )
+
+
+class UnfollowOrganizationInput(BaseModel):
+    """
+    Autogenerated input type of UnfollowOrganization
+    """
+
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    typename__: Optional[Literal['UnfollowOrganizationInput']] = Field(
+        'UnfollowOrganizationInput', alias='__typename'
+    )
+
+
+class UnfollowUserInput(BaseModel):
+    """
+    Autogenerated input type of UnfollowUser
+    """
+
+    clientMutationId: Optional[String] = None
+    userId: ID
+    typename__: Optional[Literal['UnfollowUserInput']] = Field(
+        'UnfollowUserInput', alias='__typename'
+    )
+
+
+class UnlinkProjectV2FromRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of UnlinkProjectV2FromRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    repositoryId: ID
+    typename__: Optional[Literal['UnlinkProjectV2FromRepositoryInput']] = Field(
+        'UnlinkProjectV2FromRepositoryInput', alias='__typename'
+    )
+
+
+class UnlinkProjectV2FromTeamInput(BaseModel):
+    """
+    Autogenerated input type of UnlinkProjectV2FromTeam
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    teamId: ID
+    typename__: Optional[Literal['UnlinkProjectV2FromTeamInput']] = Field(
+        'UnlinkProjectV2FromTeamInput', alias='__typename'
+    )
+
+
+class UnlinkRepositoryFromProjectInput(BaseModel):
+    """
+    Autogenerated input type of UnlinkRepositoryFromProject
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    repositoryId: ID
+    typename__: Optional[Literal['UnlinkRepositoryFromProjectInput']] = Field(
+        'UnlinkRepositoryFromProjectInput', alias='__typename'
+    )
+
+
+class UnlockLockableInput(BaseModel):
+    """
+    Autogenerated input type of UnlockLockable
+    """
+
+    clientMutationId: Optional[String] = None
+    lockableId: ID
+    typename__: Optional[Literal['UnlockLockableInput']] = Field(
+        'UnlockLockableInput', alias='__typename'
+    )
+
+
+class UnmarkDiscussionCommentAsAnswerInput(BaseModel):
+    """
+    Autogenerated input type of UnmarkDiscussionCommentAsAnswer
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['UnmarkDiscussionCommentAsAnswerInput']] = Field(
+        'UnmarkDiscussionCommentAsAnswerInput', alias='__typename'
+    )
+
+
+class UnmarkFileAsViewedInput(BaseModel):
+    """
+    Autogenerated input type of UnmarkFileAsViewed
+    """
+
+    clientMutationId: Optional[String] = None
+    path: String
+    pullRequestId: ID
+    typename__: Optional[Literal['UnmarkFileAsViewedInput']] = Field(
+        'UnmarkFileAsViewedInput', alias='__typename'
+    )
+
+
+class UnmarkIssueAsDuplicateInput(BaseModel):
+    """
+    Autogenerated input type of UnmarkIssueAsDuplicate
+    """
+
+    canonicalId: ID
+    clientMutationId: Optional[String] = None
+    duplicateId: ID
+    typename__: Optional[Literal['UnmarkIssueAsDuplicateInput']] = Field(
+        'UnmarkIssueAsDuplicateInput', alias='__typename'
+    )
+
+
+class UnmarkProjectV2AsTemplateInput(BaseModel):
+    """
+    Autogenerated input type of UnmarkProjectV2AsTemplate
+    """
+
+    clientMutationId: Optional[String] = None
+    projectId: ID
+    typename__: Optional[Literal['UnmarkProjectV2AsTemplateInput']] = Field(
+        'UnmarkProjectV2AsTemplateInput', alias='__typename'
+    )
+
+
+class UnminimizeCommentInput(BaseModel):
+    """
+    Autogenerated input type of UnminimizeComment
+    """
+
+    clientMutationId: Optional[String] = None
+    subjectId: ID
+    typename__: Optional[Literal['UnminimizeCommentInput']] = Field(
+        'UnminimizeCommentInput', alias='__typename'
+    )
+
+
+class UnpinIssueInput(BaseModel):
+    """
+    Autogenerated input type of UnpinIssue
+    """
+
+    clientMutationId: Optional[String] = None
+    issueId: ID
+    typename__: Optional[Literal['UnpinIssueInput']] = Field(
+        'UnpinIssueInput', alias='__typename'
+    )
+
+
+class UnresolveReviewThreadInput(BaseModel):
+    """
+    Autogenerated input type of UnresolveReviewThread
+    """
+
+    clientMutationId: Optional[String] = None
+    threadId: ID
+    typename__: Optional[Literal['UnresolveReviewThreadInput']] = Field(
+        'UnresolveReviewThreadInput', alias='__typename'
+    )
+
+
+class UpdateBranchProtectionRuleInput(BaseModel):
+    """
+    Autogenerated input type of UpdateBranchProtectionRule
+    """
+
+    allowsDeletions: Optional[Boolean] = None
+    allowsForcePushes: Optional[Boolean] = None
+    blocksCreations: Optional[Boolean] = None
+    branchProtectionRuleId: ID
+    bypassForcePushActorIds: Optional[List[ID]] = None
+    bypassPullRequestActorIds: Optional[List[ID]] = None
+    clientMutationId: Optional[String] = None
+    dismissesStaleReviews: Optional[Boolean] = None
+    isAdminEnforced: Optional[Boolean] = None
+    lockAllowsFetchAndMerge: Optional[Boolean] = None
+    lockBranch: Optional[Boolean] = None
+    pattern: Optional[String] = None
+    pushActorIds: Optional[List[ID]] = None
+    requireLastPushApproval: Optional[Boolean] = None
+    requiredApprovingReviewCount: Optional[Int] = None
+    requiredDeploymentEnvironments: Optional[List[String]] = None
+    requiredStatusCheckContexts: Optional[List[String]] = None
+    requiredStatusChecks: Optional[List[RequiredStatusCheckInput]] = None
+    requiresApprovingReviews: Optional[Boolean] = None
+    requiresCodeOwnerReviews: Optional[Boolean] = None
+    requiresCommitSignatures: Optional[Boolean] = None
+    requiresConversationResolution: Optional[Boolean] = None
+    requiresDeployments: Optional[Boolean] = None
+    requiresLinearHistory: Optional[Boolean] = None
+    requiresStatusChecks: Optional[Boolean] = None
+    requiresStrictStatusChecks: Optional[Boolean] = None
+    restrictsPushes: Optional[Boolean] = None
+    restrictsReviewDismissals: Optional[Boolean] = None
+    reviewDismissalActorIds: Optional[List[ID]] = None
+    typename__: Optional[Literal['UpdateBranchProtectionRuleInput']] = Field(
+        'UpdateBranchProtectionRuleInput', alias='__typename'
+    )
+
+
+class UpdateCheckRunInput(BaseModel):
+    """
+    Autogenerated input type of UpdateCheckRun
+    """
+
+    actions: Optional[List[CheckRunAction]] = None
+    checkRunId: ID
+    clientMutationId: Optional[String] = None
+    completedAt: Optional[DateTime] = None
+    conclusion: Optional[CheckConclusionState] = None
+    detailsUrl: Optional[URI] = None
+    externalId: Optional[String] = None
+    name: Optional[String] = None
+    output: Optional[CheckRunOutput] = None
+    repositoryId: ID
+    startedAt: Optional[DateTime] = None
+    status: Optional[RequestableCheckStatusState] = None
+    typename__: Optional[Literal['UpdateCheckRunInput']] = Field(
+        'UpdateCheckRunInput', alias='__typename'
+    )
+
+
+class UpdateCheckSuitePreferencesInput(BaseModel):
+    """
+    Autogenerated input type of UpdateCheckSuitePreferences
+    """
+
+    autoTriggerPreferences: List[CheckSuiteAutoTriggerPreference]
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    typename__: Optional[Literal['UpdateCheckSuitePreferencesInput']] = Field(
+        'UpdateCheckSuitePreferencesInput', alias='__typename'
+    )
+
+
+class UpdateDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of UpdateDiscussionComment
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    commentId: ID
+    typename__: Optional[Literal['UpdateDiscussionCommentInput']] = Field(
+        'UpdateDiscussionCommentInput', alias='__typename'
+    )
+
+
+class UpdateDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of UpdateDiscussion
+    """
+
+    body: Optional[String] = None
+    categoryId: Optional[ID] = None
+    clientMutationId: Optional[String] = None
+    discussionId: ID
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdateDiscussionInput']] = Field(
+        'UpdateDiscussionInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseAdministratorRoleInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseAdministratorRole
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    login: String
+    role: EnterpriseAdministratorRole
+    typename__: Optional[Literal['UpdateEnterpriseAdministratorRoleInput']] = Field(
+        'UpdateEnterpriseAdministratorRoleInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    policyValue: Optional[EnterpriseAllowPrivateRepositoryForkingPolicyValue] = None
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput']
+    ] = Field(
+        'UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseDefaultRepositoryPermissionSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseDefaultRepositoryPermissionSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseDefaultRepositoryPermissionSettingInput']
+    ] = Field(
+        'UpdateEnterpriseDefaultRepositoryPermissionSettingInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseMembersCanCreateRepositoriesSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    membersCanCreateInternalRepositories: Optional[Boolean] = None
+    membersCanCreatePrivateRepositories: Optional[Boolean] = None
+    membersCanCreatePublicRepositories: Optional[Boolean] = None
+    membersCanCreateRepositoriesPolicyEnabled: Optional[Boolean] = None
+    settingValue: Optional[EnterpriseMembersCanCreateRepositoriesSettingValue] = None
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanCreateRepositoriesSettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanCreateRepositoriesSettingInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanDeleteIssuesSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanDeleteIssuesSettingInput']
+    ] = Field('UpdateEnterpriseMembersCanDeleteIssuesSettingInput', alias='__typename')
+
+
+class UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseMembersCanMakePurchasesSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanMakePurchasesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseMembersCanMakePurchasesSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanMakePurchasesSettingInput']
+    ] = Field('UpdateEnterpriseMembersCanMakePurchasesSettingInput', alias='__typename')
+
+
+class UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput']
+    ] = Field(
+        'UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput',
+        alias='__typename',
+    )
+
+
+class UpdateEnterpriseOrganizationProjectsSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseOrganizationProjectsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseOrganizationProjectsSettingInput']
+    ] = Field('UpdateEnterpriseOrganizationProjectsSettingInput', alias='__typename')
+
+
+class UpdateEnterpriseOwnerOrganizationRoleInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseOwnerOrganizationRole
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    organizationId: ID
+    organizationRole: RoleInOrganization
+    typename__: Optional[Literal['UpdateEnterpriseOwnerOrganizationRoleInput']] = Field(
+        'UpdateEnterpriseOwnerOrganizationRoleInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseProfileInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseProfile
+    """
+
+    clientMutationId: Optional[String] = None
+    description: Optional[String] = None
+    enterpriseId: ID
+    location: Optional[String] = None
+    name: Optional[String] = None
+    websiteUrl: Optional[String] = None
+    typename__: Optional[Literal['UpdateEnterpriseProfileInput']] = Field(
+        'UpdateEnterpriseProfileInput', alias='__typename'
+    )
+
+
+class UpdateEnterpriseRepositoryProjectsSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseRepositoryProjectsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseRepositoryProjectsSettingInput']
+    ] = Field('UpdateEnterpriseRepositoryProjectsSettingInput', alias='__typename')
+
+
+class UpdateEnterpriseTeamDiscussionsSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseTeamDiscussionsSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledDisabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseTeamDiscussionsSettingInput']
+    ] = Field('UpdateEnterpriseTeamDiscussionsSettingInput', alias='__typename')
+
+
+class UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    enterpriseId: ID
+    settingValue: EnterpriseEnabledSettingValue
+    typename__: Optional[
+        Literal['UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput']
+    ] = Field(
+        'UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput',
+        alias='__typename',
+    )
+
+
+class UpdateEnvironmentInput(BaseModel):
+    """
+    Autogenerated input type of UpdateEnvironment
+    """
+
+    clientMutationId: Optional[String] = None
+    environmentId: ID
+    preventSelfReview: Optional[Boolean] = None
+    reviewers: Optional[List[ID]] = None
+    waitTimer: Optional[Int] = None
+    typename__: Optional[Literal['UpdateEnvironmentInput']] = Field(
+        'UpdateEnvironmentInput', alias='__typename'
+    )
+
+
+class UpdateIpAllowListEnabledSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateIpAllowListEnabledSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    settingValue: IpAllowListEnabledSettingValue
+    typename__: Optional[Literal['UpdateIpAllowListEnabledSettingInput']] = Field(
+        'UpdateIpAllowListEnabledSettingInput', alias='__typename'
+    )
+
+
+class UpdateIpAllowListEntryInput(BaseModel):
+    """
+    Autogenerated input type of UpdateIpAllowListEntry
+    """
+
+    allowListValue: String
+    clientMutationId: Optional[String] = None
+    ipAllowListEntryId: ID
+    isActive: Boolean
+    name: Optional[String] = None
+    typename__: Optional[Literal['UpdateIpAllowListEntryInput']] = Field(
+        'UpdateIpAllowListEntryInput', alias='__typename'
+    )
+
+
+class UpdateIpAllowListForInstalledAppsEnabledSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateIpAllowListForInstalledAppsEnabledSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    settingValue: IpAllowListForInstalledAppsEnabledSettingValue
+    typename__: Optional[
+        Literal['UpdateIpAllowListForInstalledAppsEnabledSettingInput']
+    ] = Field(
+        'UpdateIpAllowListForInstalledAppsEnabledSettingInput', alias='__typename'
+    )
+
+
+class UpdateIssueCommentInput(BaseModel):
+    """
+    Autogenerated input type of UpdateIssueComment
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['UpdateIssueCommentInput']] = Field(
+        'UpdateIssueCommentInput', alias='__typename'
+    )
+
+
+class UpdateIssueInput(BaseModel):
+    """
+    Autogenerated input type of UpdateIssue
+    """
+
+    assigneeIds: Optional[List[ID]] = None
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    id: ID
+    labelIds: Optional[List[ID]] = None
+    milestoneId: Optional[ID] = None
+    projectIds: Optional[List[ID]] = None
+    state: Optional[IssueState] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdateIssueInput']] = Field(
+        'UpdateIssueInput', alias='__typename'
+    )
+
+
+class UpdateLabelInput(BaseModel):
+    """
+    Autogenerated input type of UpdateLabel
+    """
+
+    clientMutationId: Optional[String] = None
+    color: Optional[String] = None
+    description: Optional[String] = None
+    id: ID
+    name: Optional[String] = None
+    typename__: Optional[Literal['UpdateLabelInput']] = Field(
+        'UpdateLabelInput', alias='__typename'
+    )
+
+
+class UpdateNotificationRestrictionSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateNotificationRestrictionSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    ownerId: ID
+    settingValue: NotificationRestrictionSettingValue
+    typename__: Optional[Literal['UpdateNotificationRestrictionSettingInput']] = Field(
+        'UpdateNotificationRestrictionSettingInput', alias='__typename'
+    )
+
+
+class UpdateOrganizationAllowPrivateRepositoryForkingSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    forkingEnabled: Boolean
+    organizationId: ID
+    typename__: Optional[
+        Literal['UpdateOrganizationAllowPrivateRepositoryForkingSettingInput']
+    ] = Field(
+        'UpdateOrganizationAllowPrivateRepositoryForkingSettingInput',
+        alias='__typename',
+    )
+
+
+class UpdateOrganizationWebCommitSignoffSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateOrganizationWebCommitSignoffSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    organizationId: ID
+    webCommitSignoffRequired: Boolean
+    typename__: Optional[
+        Literal['UpdateOrganizationWebCommitSignoffSettingInput']
+    ] = Field('UpdateOrganizationWebCommitSignoffSettingInput', alias='__typename')
+
+
+class UpdateParametersInput(BaseModel):
+    """
+    Only allow users with bypass permission to update matching refs.
+    """
+
+    updateAllowsFetchAndMerge: Boolean
+    typename__: Optional[Literal['UpdateParametersInput']] = Field(
+        'UpdateParametersInput', alias='__typename'
+    )
+
+
+class UpdatePatreonSponsorabilityInput(BaseModel):
+    """
+    Autogenerated input type of UpdatePatreonSponsorability
+    """
+
+    clientMutationId: Optional[String] = None
+    enablePatreonSponsorships: Boolean
+    sponsorableLogin: Optional[String] = None
+    typename__: Optional[Literal['UpdatePatreonSponsorabilityInput']] = Field(
+        'UpdatePatreonSponsorabilityInput', alias='__typename'
+    )
+
+
+class UpdateProjectCardInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectCard
+    """
+
+    clientMutationId: Optional[String] = None
+    isArchived: Optional[Boolean] = None
+    note: Optional[String] = None
+    projectCardId: ID
+    typename__: Optional[Literal['UpdateProjectCardInput']] = Field(
+        'UpdateProjectCardInput', alias='__typename'
+    )
+
+
+class UpdateProjectColumnInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectColumn
+    """
+
+    clientMutationId: Optional[String] = None
+    name: String
+    projectColumnId: ID
+    typename__: Optional[Literal['UpdateProjectColumnInput']] = Field(
+        'UpdateProjectColumnInput', alias='__typename'
+    )
+
+
+class UpdateProjectInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProject
+    """
+
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    name: Optional[String] = None
+    projectId: ID
+    public: Optional[Boolean] = None
+    state: Optional[ProjectState] = None
+    typename__: Optional[Literal['UpdateProjectInput']] = Field(
+        'UpdateProjectInput', alias='__typename'
+    )
+
+
+class UpdateProjectV2CollaboratorsInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectV2Collaborators
+    """
+
+    clientMutationId: Optional[String] = None
+    collaborators: List[ProjectV2Collaborator]
+    projectId: ID
+    typename__: Optional[Literal['UpdateProjectV2CollaboratorsInput']] = Field(
+        'UpdateProjectV2CollaboratorsInput', alias='__typename'
+    )
+
+
+class UpdateProjectV2DraftIssueInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectV2DraftIssue
+    """
+
+    assigneeIds: Optional[List[ID]] = None
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    draftIssueId: ID
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdateProjectV2DraftIssueInput']] = Field(
+        'UpdateProjectV2DraftIssueInput', alias='__typename'
+    )
+
+
+class UpdateProjectV2Input(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectV2
+    """
+
+    clientMutationId: Optional[String] = None
+    closed: Optional[Boolean] = None
+    projectId: ID
+    public: Optional[Boolean] = None
+    readme: Optional[String] = None
+    shortDescription: Optional[String] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdateProjectV2Input']] = Field(
+        'UpdateProjectV2Input', alias='__typename'
+    )
+
+
+class UpdateProjectV2ItemFieldValueInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectV2ItemFieldValue
+    """
+
+    clientMutationId: Optional[String] = None
+    fieldId: ID
+    itemId: ID
+    projectId: ID
+    value: ProjectV2FieldValue
+    typename__: Optional[Literal['UpdateProjectV2ItemFieldValueInput']] = Field(
+        'UpdateProjectV2ItemFieldValueInput', alias='__typename'
+    )
+
+
+class UpdateProjectV2ItemPositionInput(BaseModel):
+    """
+    Autogenerated input type of UpdateProjectV2ItemPosition
+    """
+
+    afterId: Optional[ID] = None
+    clientMutationId: Optional[String] = None
+    itemId: ID
+    projectId: ID
+    typename__: Optional[Literal['UpdateProjectV2ItemPositionInput']] = Field(
+        'UpdateProjectV2ItemPositionInput', alias='__typename'
+    )
+
+
+class UpdatePullRequestBranchInput(BaseModel):
+    """
+    Autogenerated input type of UpdatePullRequestBranch
+    """
+
+    clientMutationId: Optional[String] = None
+    expectedHeadOid: Optional[GitObjectID] = None
+    pullRequestId: ID
+    updateMethod: Optional[PullRequestBranchUpdateMethod] = None
+    typename__: Optional[Literal['UpdatePullRequestBranchInput']] = Field(
+        'UpdatePullRequestBranchInput', alias='__typename'
+    )
+
+
+class UpdatePullRequestInput(BaseModel):
+    """
+    Autogenerated input type of UpdatePullRequest
+    """
+
+    assigneeIds: Optional[List[ID]] = None
+    baseRefName: Optional[String] = None
+    body: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    labelIds: Optional[List[ID]] = None
+    maintainerCanModify: Optional[Boolean] = None
+    milestoneId: Optional[ID] = None
+    projectIds: Optional[List[ID]] = None
+    pullRequestId: ID
+    state: Optional[PullRequestUpdateState] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdatePullRequestInput']] = Field(
+        'UpdatePullRequestInput', alias='__typename'
+    )
+
+
+class UpdatePullRequestReviewCommentInput(BaseModel):
+    """
+    Autogenerated input type of UpdatePullRequestReviewComment
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    pullRequestReviewCommentId: ID
+    typename__: Optional[Literal['UpdatePullRequestReviewCommentInput']] = Field(
+        'UpdatePullRequestReviewCommentInput', alias='__typename'
+    )
+
+
+class UpdatePullRequestReviewInput(BaseModel):
+    """
+    Autogenerated input type of UpdatePullRequestReview
+    """
+
+    body: String
+    clientMutationId: Optional[String] = None
+    pullRequestReviewId: ID
+    typename__: Optional[Literal['UpdatePullRequestReviewInput']] = Field(
+        'UpdatePullRequestReviewInput', alias='__typename'
+    )
+
+
+class UpdateRefInput(BaseModel):
+    """
+    Autogenerated input type of UpdateRef
+    """
+
+    clientMutationId: Optional[String] = None
+    force: Optional[Boolean] = False
+    oid: GitObjectID
+    refId: ID
+    typename__: Optional[Literal['UpdateRefInput']] = Field(
+        'UpdateRefInput', alias='__typename'
+    )
+
+
+class UpdateRefsInput(BaseModel):
+    """
+    Autogenerated input type of UpdateRefs
+    """
+
+    clientMutationId: Optional[String] = None
+    refUpdates: List[RefUpdate]
+    repositoryId: ID
+    typename__: Optional[Literal['UpdateRefsInput']] = Field(
+        'UpdateRefsInput', alias='__typename'
+    )
+
+
+class UpdateRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of UpdateRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    description: Optional[String] = None
+    hasDiscussionsEnabled: Optional[Boolean] = None
+    hasIssuesEnabled: Optional[Boolean] = None
+    hasProjectsEnabled: Optional[Boolean] = None
+    hasWikiEnabled: Optional[Boolean] = None
+    homepageUrl: Optional[URI] = None
+    name: Optional[String] = None
+    repositoryId: ID
+    template: Optional[Boolean] = None
+    typename__: Optional[Literal['UpdateRepositoryInput']] = Field(
+        'UpdateRepositoryInput', alias='__typename'
+    )
+
+
+class UpdateRepositoryRulesetInput(BaseModel):
+    """
+    Autogenerated input type of UpdateRepositoryRuleset
+    """
+
+    bypassActors: Optional[List[RepositoryRulesetBypassActorInput]] = None
+    clientMutationId: Optional[String] = None
+    conditions: Optional[RepositoryRuleConditionsInput] = None
+    enforcement: Optional[RuleEnforcement] = None
+    name: Optional[String] = None
+    repositoryRulesetId: ID
+    rules: Optional[List[RepositoryRuleInput]] = None
+    target: Optional[RepositoryRulesetTarget] = None
+    typename__: Optional[Literal['UpdateRepositoryRulesetInput']] = Field(
+        'UpdateRepositoryRulesetInput', alias='__typename'
+    )
+
+
+class UpdateRepositoryWebCommitSignoffSettingInput(BaseModel):
+    """
+    Autogenerated input type of UpdateRepositoryWebCommitSignoffSetting
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    webCommitSignoffRequired: Boolean
+    typename__: Optional[
+        Literal['UpdateRepositoryWebCommitSignoffSettingInput']
+    ] = Field('UpdateRepositoryWebCommitSignoffSettingInput', alias='__typename')
+
+
+class UpdateSponsorshipPreferencesInput(BaseModel):
+    """
+    Autogenerated input type of UpdateSponsorshipPreferences
+    """
+
+    clientMutationId: Optional[String] = None
+    privacyLevel: Optional[SponsorshipPrivacy] = 'PUBLIC'
+    receiveEmails: Optional[Boolean] = True
+    sponsorId: Optional[ID] = None
+    sponsorLogin: Optional[String] = None
+    sponsorableId: Optional[ID] = None
+    sponsorableLogin: Optional[String] = None
+    typename__: Optional[Literal['UpdateSponsorshipPreferencesInput']] = Field(
+        'UpdateSponsorshipPreferencesInput', alias='__typename'
+    )
+
+
+class UpdateSubscriptionInput(BaseModel):
+    """
+    Autogenerated input type of UpdateSubscription
+    """
+
+    clientMutationId: Optional[String] = None
+    state: SubscriptionState
+    subscribableId: ID
+    typename__: Optional[Literal['UpdateSubscriptionInput']] = Field(
+        'UpdateSubscriptionInput', alias='__typename'
+    )
+
+
+class UpdateTeamDiscussionCommentInput(BaseModel):
+    """
+    Autogenerated input type of UpdateTeamDiscussionComment
+    """
+
+    body: String
+    bodyVersion: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['UpdateTeamDiscussionCommentInput']] = Field(
+        'UpdateTeamDiscussionCommentInput', alias='__typename'
+    )
+
+
+class UpdateTeamDiscussionInput(BaseModel):
+    """
+    Autogenerated input type of UpdateTeamDiscussion
+    """
+
+    body: Optional[String] = None
+    bodyVersion: Optional[String] = None
+    clientMutationId: Optional[String] = None
+    id: ID
+    pinned: Optional[Boolean] = None
+    title: Optional[String] = None
+    typename__: Optional[Literal['UpdateTeamDiscussionInput']] = Field(
+        'UpdateTeamDiscussionInput', alias='__typename'
+    )
+
+
+class UpdateTeamReviewAssignmentInput(BaseModel):
+    """
+    Autogenerated input type of UpdateTeamReviewAssignment
+    """
+
+    algorithm: Optional[TeamReviewAssignmentAlgorithm] = 'ROUND_ROBIN'
+    clientMutationId: Optional[String] = None
+    enabled: Boolean
+    excludedTeamMemberIds: Optional[List[ID]] = None
+    id: ID
+    notifyTeam: Optional[Boolean] = True
+    teamMemberCount: Optional[Int] = 1
+    typename__: Optional[Literal['UpdateTeamReviewAssignmentInput']] = Field(
+        'UpdateTeamReviewAssignmentInput', alias='__typename'
+    )
+
+
+class UpdateTeamsRepositoryInput(BaseModel):
+    """
+    Autogenerated input type of UpdateTeamsRepository
+    """
+
+    clientMutationId: Optional[String] = None
+    permission: RepositoryPermission
+    repositoryId: ID
+    teamIds: List[ID]
+    typename__: Optional[Literal['UpdateTeamsRepositoryInput']] = Field(
+        'UpdateTeamsRepositoryInput', alias='__typename'
+    )
+
+
+class UpdateTopicsInput(BaseModel):
+    """
+    Autogenerated input type of UpdateTopics
+    """
+
+    clientMutationId: Optional[String] = None
+    repositoryId: ID
+    topicNames: List[String]
+    typename__: Optional[Literal['UpdateTopicsInput']] = Field(
+        'UpdateTopicsInput', alias='__typename'
+    )
+
+
+class UserStatusOrder(BaseModel):
+    """
+    Ordering options for user status connections.
+    """
+
+    direction: OrderDirection
+    field: UserStatusOrderField
+    typename__: Optional[Literal['UserStatusOrder']] = Field(
+        'UserStatusOrder', alias='__typename'
+    )
+
+
+class VerifiableDomainOrder(BaseModel):
+    """
+    Ordering options for verifiable domain connections.
+    """
+
+    direction: OrderDirection
+    field: VerifiableDomainOrderField
+    typename__: Optional[Literal['VerifiableDomainOrder']] = Field(
+        'VerifiableDomainOrder', alias='__typename'
+    )
+
+
+class VerifyVerifiableDomainInput(BaseModel):
+    """
+    Autogenerated input type of VerifyVerifiableDomain
+    """
+
+    clientMutationId: Optional[String] = None
+    id: ID
+    typename__: Optional[Literal['VerifyVerifiableDomainInput']] = Field(
+        'VerifyVerifiableDomainInput', alias='__typename'
+    )
+
+
+class WorkflowFileReferenceInput(BaseModel):
+    """
+    A workflow that must run for this rule to pass
+    """
+
+    path: String
+    ref: Optional[String] = None
+    repositoryId: Int
+    sha: Optional[String] = None
+    typename__: Optional[Literal['WorkflowFileReferenceInput']] = Field(
+        'WorkflowFileReferenceInput', alias='__typename'
+    )
+
+
+class WorkflowRunOrder(BaseModel):
+    """
+    Ways in which lists of workflow runs can be ordered upon return.
+    """
+
+    direction: OrderDirection
+    field: WorkflowRunOrderField
+    typename__: Optional[Literal['WorkflowRunOrder']] = Field(
+        'WorkflowRunOrder', alias='__typename'
+    )
+
+
+class WorkflowsParametersInput(BaseModel):
+    """
+    Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+    """
+
+    workflows: List[WorkflowFileReferenceInput]
+    typename__: Optional[Literal['WorkflowsParametersInput']] = Field(
+        'WorkflowsParametersInput', alias='__typename'
+    )
+
+
+# Types that can be assigned to issues.
+Assignee: TypeAlias = Union[
+    'Bot',
+    'Mannequin',
+    'Organization',
+    'User',
+]
+
+
+# Types that can initiate an audit log event.
+AuditEntryActor: TypeAlias = Union[
+    'Bot',
+    'Organization',
+    'User',
+]
+
+
+# Types which can be actors for `BranchActorAllowance` objects.
+BranchActorAllowanceActor: TypeAlias = Union[
+    'App',
+    'Team',
+    'User',
+]
+
+
+# Types that can represent a repository ruleset bypass actor.
+BypassActor: TypeAlias = Union[
+    'App',
+    'Team',
+]
+
+
+# An object which can have its data claimed or claim data from another.
+Claimable: TypeAlias = Union[
+    'Mannequin',
+    'User',
+]
+
+
+# The object which triggered a `ClosedEvent`.
+Closer: TypeAlias = Union[
+    'Commit',
+    'PullRequest',
+]
+
+
+# Represents either a issue the viewer can access or a restricted contribution.
+CreatedIssueOrRestrictedContribution: TypeAlias = Union[
+    'CreatedIssueContribution',
+    'RestrictedContribution',
+]
+
+
+# Represents either a pull request the viewer can access or a restricted contribution.
+CreatedPullRequestOrRestrictedContribution: TypeAlias = Union[
+    'CreatedPullRequestContribution',
+    'RestrictedContribution',
+]
+
+
+# Represents either a repository the viewer can access or a restricted contribution.
+CreatedRepositoryOrRestrictedContribution: TypeAlias = Union[
+    'CreatedRepositoryContribution',
+    'RestrictedContribution',
+]
+
+
+# Users and teams.
+DeploymentReviewer: TypeAlias = Union[
+    'Team',
+    'User',
+]
+
+
+# An object that is a member of an enterprise.
+EnterpriseMember: TypeAlias = Union[
+    'EnterpriseUserAccount',
+    'User',
+]
+
+
+# Types that can own an IP allow list.
+IpAllowListOwner: TypeAlias = Union[
+    'App',
+    'Enterprise',
+    'Organization',
+]
+
+
+# Used for return value of Repository.issueOrPullRequest.
+IssueOrPullRequest: TypeAlias = Union[
+    'Issue',
+    'PullRequest',
+]
+
+
+# An item in an issue timeline
+IssueTimelineItem: TypeAlias = Union[
+    'AssignedEvent',
+    'ClosedEvent',
+    'Commit',
+    'CrossReferencedEvent',
+    'DemilestonedEvent',
+    'IssueComment',
+    'LabeledEvent',
+    'LockedEvent',
+    'MilestonedEvent',
+    'ReferencedEvent',
+    'RenamedTitleEvent',
+    'ReopenedEvent',
+    'SubscribedEvent',
+    'TransferredEvent',
+    'UnassignedEvent',
+    'UnlabeledEvent',
+    'UnlockedEvent',
+    'UnsubscribedEvent',
+    'UserBlockedEvent',
+]
+
+
+# An item in an issue timeline
+IssueTimelineItems: TypeAlias = Union[
+    'AddedToProjectEvent',
+    'AssignedEvent',
+    'ClosedEvent',
+    'CommentDeletedEvent',
+    'ConnectedEvent',
+    'ConvertedNoteToIssueEvent',
+    'ConvertedToDiscussionEvent',
+    'CrossReferencedEvent',
+    'DemilestonedEvent',
+    'DisconnectedEvent',
+    'IssueComment',
+    'LabeledEvent',
+    'LockedEvent',
+    'MarkedAsDuplicateEvent',
+    'MentionedEvent',
+    'MilestonedEvent',
+    'MovedColumnsInProjectEvent',
+    'PinnedEvent',
+    'ReferencedEvent',
+    'RemovedFromProjectEvent',
+    'RenamedTitleEvent',
+    'ReopenedEvent',
+    'SubscribedEvent',
+    'TransferredEvent',
+    'UnassignedEvent',
+    'UnlabeledEvent',
+    'UnlockedEvent',
+    'UnmarkedAsDuplicateEvent',
+    'UnpinnedEvent',
+    'UnsubscribedEvent',
+    'UserBlockedEvent',
+]
+
+
+# Types that can be inside a Milestone.
+MilestoneItem: TypeAlias = Union[
+    'Issue',
+    'PullRequest',
+]
+
+
+# Types of memberships that can be restored for an Organization member.
+OrgRestoreMemberAuditEntryMembership: TypeAlias = Union[
+    'OrgRestoreMemberMembershipOrganizationAuditEntryData',
+    'OrgRestoreMemberMembershipRepositoryAuditEntryData',
+    'OrgRestoreMemberMembershipTeamAuditEntryData',
+]
+
+
+# An audit entry in an organization audit log.
+OrganizationAuditEntry: TypeAlias = Union[
+    'MembersCanDeleteReposClearAuditEntry',
+    'MembersCanDeleteReposDisableAuditEntry',
+    'MembersCanDeleteReposEnableAuditEntry',
+    'OauthApplicationCreateAuditEntry',
+    'OrgAddBillingManagerAuditEntry',
+    'OrgAddMemberAuditEntry',
+    'OrgBlockUserAuditEntry',
+    'OrgConfigDisableCollaboratorsOnlyAuditEntry',
+    'OrgConfigEnableCollaboratorsOnlyAuditEntry',
+    'OrgCreateAuditEntry',
+    'OrgDisableOauthAppRestrictionsAuditEntry',
+    'OrgDisableSamlAuditEntry',
+    'OrgDisableTwoFactorRequirementAuditEntry',
+    'OrgEnableOauthAppRestrictionsAuditEntry',
+    'OrgEnableSamlAuditEntry',
+    'OrgEnableTwoFactorRequirementAuditEntry',
+    'OrgInviteMemberAuditEntry',
+    'OrgInviteToBusinessAuditEntry',
+    'OrgOauthAppAccessApprovedAuditEntry',
+    'OrgOauthAppAccessBlockedAuditEntry',
+    'OrgOauthAppAccessDeniedAuditEntry',
+    'OrgOauthAppAccessRequestedAuditEntry',
+    'OrgOauthAppAccessUnblockedAuditEntry',
+    'OrgRemoveBillingManagerAuditEntry',
+    'OrgRemoveMemberAuditEntry',
+    'OrgRemoveOutsideCollaboratorAuditEntry',
+    'OrgRestoreMemberAuditEntry',
+    'OrgUnblockUserAuditEntry',
+    'OrgUpdateDefaultRepositoryPermissionAuditEntry',
+    'OrgUpdateMemberAuditEntry',
+    'OrgUpdateMemberRepositoryCreationPermissionAuditEntry',
+    'OrgUpdateMemberRepositoryInvitationPermissionAuditEntry',
+    'PrivateRepositoryForkingDisableAuditEntry',
+    'PrivateRepositoryForkingEnableAuditEntry',
+    'RepoAccessAuditEntry',
+    'RepoAddMemberAuditEntry',
+    'RepoAddTopicAuditEntry',
+    'RepoArchivedAuditEntry',
+    'RepoChangeMergeSettingAuditEntry',
+    'RepoConfigDisableAnonymousGitAccessAuditEntry',
+    'RepoConfigDisableCollaboratorsOnlyAuditEntry',
+    'RepoConfigDisableContributorsOnlyAuditEntry',
+    'RepoConfigDisableSockpuppetDisallowedAuditEntry',
+    'RepoConfigEnableAnonymousGitAccessAuditEntry',
+    'RepoConfigEnableCollaboratorsOnlyAuditEntry',
+    'RepoConfigEnableContributorsOnlyAuditEntry',
+    'RepoConfigEnableSockpuppetDisallowedAuditEntry',
+    'RepoConfigLockAnonymousGitAccessAuditEntry',
+    'RepoConfigUnlockAnonymousGitAccessAuditEntry',
+    'RepoCreateAuditEntry',
+    'RepoDestroyAuditEntry',
+    'RepoRemoveMemberAuditEntry',
+    'RepoRemoveTopicAuditEntry',
+    'RepositoryVisibilityChangeDisableAuditEntry',
+    'RepositoryVisibilityChangeEnableAuditEntry',
+    'TeamAddMemberAuditEntry',
+    'TeamAddRepositoryAuditEntry',
+    'TeamChangeParentTeamAuditEntry',
+    'TeamRemoveMemberAuditEntry',
+    'TeamRemoveRepositoryAuditEntry',
+]
+
+
+# Used for argument of CreateProjectV2 mutation.
+OrganizationOrUser: TypeAlias = Union[
+    'Organization',
+    'User',
+]
+
+
+# Types that can grant permissions on a repository to a user
+PermissionGranter: TypeAlias = Union[
+    'Organization',
+    'Repository',
+    'Team',
+]
+
+
+# Types that can be pinned to a profile page.
+PinnableItem: TypeAlias = Union[
+    'Gist',
+    'Repository',
+]
+
+
+# Types that can be inside Project Cards.
+ProjectCardItem: TypeAlias = Union[
+    'Issue',
+    'PullRequest',
+]
+
+
+# Possible collaborators for a project.
+ProjectV2Actor: TypeAlias = Union[
+    'Team',
+    'User',
+]
+
+
+# Configurations for project fields.
+ProjectV2FieldConfiguration: TypeAlias = Union[
+    'ProjectV2Field',
+    'ProjectV2IterationField',
+    'ProjectV2SingleSelectField',
+]
+
+
+# Types that can be inside Project Items.
+ProjectV2ItemContent: TypeAlias = Union[
+    'DraftIssue',
+    'Issue',
+    'PullRequest',
+]
+
+
+# Project field values
+ProjectV2ItemFieldValue: TypeAlias = Union[
+    'ProjectV2ItemFieldDateValue',
+    'ProjectV2ItemFieldIterationValue',
+    'ProjectV2ItemFieldLabelValue',
+    'ProjectV2ItemFieldMilestoneValue',
+    'ProjectV2ItemFieldNumberValue',
+    'ProjectV2ItemFieldPullRequestValue',
+    'ProjectV2ItemFieldRepositoryValue',
+    'ProjectV2ItemFieldReviewerValue',
+    'ProjectV2ItemFieldSingleSelectValue',
+    'ProjectV2ItemFieldTextValue',
+    'ProjectV2ItemFieldUserValue',
+]
+
+
+# An item in a pull request timeline
+PullRequestTimelineItem: TypeAlias = Union[
+    'AssignedEvent',
+    'BaseRefDeletedEvent',
+    'BaseRefForcePushedEvent',
+    'ClosedEvent',
+    'Commit',
+    'CommitCommentThread',
+    'CrossReferencedEvent',
+    'DemilestonedEvent',
+    'DeployedEvent',
+    'DeploymentEnvironmentChangedEvent',
+    'HeadRefDeletedEvent',
+    'HeadRefForcePushedEvent',
+    'HeadRefRestoredEvent',
+    'IssueComment',
+    'LabeledEvent',
+    'LockedEvent',
+    'MergedEvent',
+    'MilestonedEvent',
+    'PullRequestReview',
+    'PullRequestReviewComment',
+    'PullRequestReviewThread',
+    'ReferencedEvent',
+    'RenamedTitleEvent',
+    'ReopenedEvent',
+    'ReviewDismissedEvent',
+    'ReviewRequestRemovedEvent',
+    'ReviewRequestedEvent',
+    'SubscribedEvent',
+    'UnassignedEvent',
+    'UnlabeledEvent',
+    'UnlockedEvent',
+    'UnsubscribedEvent',
+    'UserBlockedEvent',
+]
+
+
+# An item in a pull request timeline
+PullRequestTimelineItems: TypeAlias = Union[
+    'AddedToMergeQueueEvent',
+    'AddedToProjectEvent',
+    'AssignedEvent',
+    'AutoMergeDisabledEvent',
+    'AutoMergeEnabledEvent',
+    'AutoRebaseEnabledEvent',
+    'AutoSquashEnabledEvent',
+    'AutomaticBaseChangeFailedEvent',
+    'AutomaticBaseChangeSucceededEvent',
+    'BaseRefChangedEvent',
+    'BaseRefDeletedEvent',
+    'BaseRefForcePushedEvent',
+    'ClosedEvent',
+    'CommentDeletedEvent',
+    'ConnectedEvent',
+    'ConvertToDraftEvent',
+    'ConvertedNoteToIssueEvent',
+    'ConvertedToDiscussionEvent',
+    'CrossReferencedEvent',
+    'DemilestonedEvent',
+    'DeployedEvent',
+    'DeploymentEnvironmentChangedEvent',
+    'DisconnectedEvent',
+    'HeadRefDeletedEvent',
+    'HeadRefForcePushedEvent',
+    'HeadRefRestoredEvent',
+    'IssueComment',
+    'LabeledEvent',
+    'LockedEvent',
+    'MarkedAsDuplicateEvent',
+    'MentionedEvent',
+    'MergedEvent',
+    'MilestonedEvent',
+    'MovedColumnsInProjectEvent',
+    'PinnedEvent',
+    'PullRequestCommit',
+    'PullRequestCommitCommentThread',
+    'PullRequestReview',
+    'PullRequestReviewThread',
+    'PullRequestRevisionMarker',
+    'ReadyForReviewEvent',
+    'ReferencedEvent',
+    'RemovedFromMergeQueueEvent',
+    'RemovedFromProjectEvent',
+    'RenamedTitleEvent',
+    'ReopenedEvent',
+    'ReviewDismissedEvent',
+    'ReviewRequestRemovedEvent',
+    'ReviewRequestedEvent',
+    'SubscribedEvent',
+    'TransferredEvent',
+    'UnassignedEvent',
+    'UnlabeledEvent',
+    'UnlockedEvent',
+    'UnmarkedAsDuplicateEvent',
+    'UnpinnedEvent',
+    'UnsubscribedEvent',
+    'UserBlockedEvent',
+]
+
+
+# Types that can be an actor.
+PushAllowanceActor: TypeAlias = Union[
+    'App',
+    'Team',
+    'User',
+]
+
+
+# Types that can be assigned to reactions.
+Reactor: TypeAlias = Union[
+    'Bot',
+    'Mannequin',
+    'Organization',
+    'User',
+]
+
+
+# Any referenceable object
+ReferencedSubject: TypeAlias = Union[
+    'Issue',
+    'PullRequest',
+]
+
+
+# An object which has a renamable title
+RenamedTitleSubject: TypeAlias = Union[
+    'Issue',
+    'PullRequest',
+]
+
+
+# Types that can be requested reviewers.
+RequestedReviewer: TypeAlias = Union[
+    'Bot',
+    'Mannequin',
+    'Team',
+    'User',
+]
+
+
+# Types that can be an actor.
+ReviewDismissalAllowanceActor: TypeAlias = Union[
+    'App',
+    'Team',
+    'User',
+]
+
+
+# Types which can be parameters for `RepositoryRule` objects.
+RuleParameters: TypeAlias = Union[
+    'BranchNamePatternParameters',
+    'CommitAuthorEmailPatternParameters',
+    'CommitMessagePatternParameters',
+    'CommitterEmailPatternParameters',
+    'PullRequestParameters',
+    'RequiredDeploymentsParameters',
+    'RequiredStatusChecksParameters',
+    'TagNamePatternParameters',
+    'UpdateParameters',
+    'WorkflowsParameters',
+]
+
+
+# Types which can have `RepositoryRule` objects.
+RuleSource: TypeAlias = Union[
+    'Organization',
+    'Repository',
+]
+
+
+# The results of a search.
+SearchResultItem: TypeAlias = Union[
+    'App',
+    'Discussion',
+    'Issue',
+    'MarketplaceListing',
+    'Organization',
+    'PullRequest',
+    'Repository',
+    'User',
+]
+
+
+# Entities that can sponsor others via GitHub Sponsors
+Sponsor: TypeAlias = Union[
+    'Organization',
+    'User',
+]
+
+
+# Entities that can be sponsored via GitHub Sponsors
+SponsorableItem: TypeAlias = Union[
+    'Organization',
+    'User',
+]
+
+
+# A record that can be featured on a GitHub Sponsors profile.
+SponsorsListingFeatureableItem: TypeAlias = Union[
+    'Repository',
+    'User',
+]
+
+
+# Types that can be inside a StatusCheckRollup context.
+StatusCheckRollupContext: TypeAlias = Union[
+    'CheckRun',
+    'StatusContext',
+]
+
+
+# Types that can own a verifiable domain.
+VerifiableDomainOwner: TypeAlias = Union[
+    'Enterprise',
+    'Organization',
+]
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars.py 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,154 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Film(BaseModel):
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    producer: Optional[String] = None
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Film']] = Field('Film', alias='__typename')
+
+
+class Person(BaseModel):
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    id: ID
+    mass: Optional[Int] = None
+    name: String
+    skin_color: Optional[String] = None
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Planet(BaseModel):
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gravity: Optional[String] = None
+    id: ID
+    name: String
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    residents: List[Person]
+    residents_ids: List[ID]
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = Field('Planet', alias='__typename')
+
+
+class Species(BaseModel):
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hair_colors: Optional[String] = None
+    id: ID
+    language: Optional[String] = None
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = Field('Species', alias='__typename')
+
+
+class Starship(BaseModel):
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hyperdrive_rating: Optional[String] = None
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = Field('Starship', alias='__typename')
+
+
+class Vehicle(BaseModel):
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = Field('Vehicle', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass.py 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,159 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Literal, Optional, TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+@dataclass
+class Film:
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    producer: Optional[String] = None
+    typename__: Optional[Literal['Film']] = 'Film'
+
+
+@dataclass
+class Person:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    mass: Optional[Int] = None
+    skin_color: Optional[String] = None
+    typename__: Optional[Literal['Person']] = 'Person'
+
+
+@dataclass
+class Planet:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    residents: List[Person]
+    residents_ids: List[ID]
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    gravity: Optional[String] = None
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = 'Planet'
+
+
+@dataclass
+class Species:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    hair_colors: Optional[String] = None
+    language: Optional[String] = None
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = 'Species'
+
+
+@dataclass
+class Starship:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    hyperdrive_rating: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = 'Starship'
+
+
+@dataclass
+class Vehicle:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = 'Vehicle'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,172 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Extra, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Film(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    producer: Optional[String] = None
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Film']] = Field('Film', alias='__typename')
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    id: ID
+    mass: Optional[Int] = None
+    name: String
+    skin_color: Optional[String] = None
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Planet(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gravity: Optional[String] = None
+    id: ID
+    name: String
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    residents: List[Person]
+    residents_ids: List[ID]
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = Field('Planet', alias='__typename')
+
+
+class Species(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hair_colors: Optional[String] = None
+    id: ID
+    language: Optional[String] = None
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = Field('Species', alias='__typename')
+
+
+class Starship(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hyperdrive_rating: Optional[String] = None
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = Field('Starship', alias='__typename')
+
+
+class Vehicle(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = Field('Vehicle', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/union.py 0.34.0-1/tests/data/expected/main/graphql/union.py
--- 0.26.4-3/tests/data/expected/main/graphql/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/union.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,59 @@
+# generated by datamodel-codegen:
+#   filename:  union.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias, Union
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class IResource(BaseModel):
+    id: ID
+    typename__: Optional[Literal['IResource']] = Field('IResource', alias='__typename')
+
+
+class Car(IResource):
+    id: ID
+    passengerCapacity: Int
+    typename__: Optional[Literal['Car']] = Field('Car', alias='__typename')
+
+
+class Employee(IResource):
+    firstName: Optional[String] = None
+    id: ID
+    lastName: Optional[String] = None
+    typename__: Optional[Literal['Employee']] = Field('Employee', alias='__typename')
+
+
+Resource: TypeAlias = Union[
+    'Car',
+    'Employee',
+]
+
+
+TechnicalResource: TypeAlias = Car
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/use_standard_collections.py 0.34.0-1/tests/data/expected/main/graphql/use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/graphql/use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/use_standard_collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  use-standard-collections.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/use_union_operator.py 0.34.0-1/tests/data/expected/main/graphql/use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/graphql/use_union_operator.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  use-union-operator.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listListOptionalField: List[List[String | None]]
+    listOptionalField: List[String | None]
+    listOptionalListField: List[List[String] | None]
+    listOptionalListOptionalField: List[List[String | None] | None]
+    optionalField: String | None = None
+    optionalListListField: List[List[String]] | None = None
+    optionalListListOptionalField: List[List[String | None]] | None = None
+    optionalListOptionalField: List[String | None] | None = None
+    optionalListOptionalListField: List[List[String] | None] | None = None
+    typename__: Literal['A'] | None = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/json/general.py 0.34.0-1/tests/data/expected/main/json/general.py
--- 0.26.4-3/tests/data/expected/main/json/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_array_include_null.py 0.34.0-1/tests/data/expected/main/json/json_array_include_null.py
--- 0.26.4-3/tests/data/expected/main/json/json_array_include_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/json_array_include_null.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  array_include_null.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    oofield: Optional[List[int]]
+
+
+class Model(BaseModel):
+    items: List[Item]
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_reuse_model.py 0.34.0-1/tests/data/expected/main/json/json_reuse_model.py
--- 0.26.4-3/tests/data/expected/main/json/json_reuse_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/json_reuse_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ArmRight(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+    Joint_2: int = Field(..., alias='Joint 2')
+    Joint_3: int = Field(..., alias='Joint 3')
+
+
+class ArmLeft(ArmRight):
+    pass
+
+
+class Head(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+
+
+class Model(BaseModel):
+    Arm_Right: ArmRight = Field(..., alias='Arm Right')
+    Arm_Left: ArmLeft = Field(..., alias='Arm Left')
+    Head: Head
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_reuse_model_pydantic2.py 0.34.0-1/tests/data/expected/main/json/json_reuse_model_pydantic2.py
--- 0.26.4-3/tests/data/expected/main/json/json_reuse_model_pydantic2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/json_reuse_model_pydantic2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ArmRight(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+    Joint_2: int = Field(..., alias='Joint 2')
+    Joint_3: int = Field(..., alias='Joint 3')
+
+
+ArmLeft = ArmRight
+
+
+class Head(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+
+
+class Model(BaseModel):
+    Arm_Right: ArmRight = Field(..., alias='Arm Right')
+    Arm_Left: ArmLeft = Field(..., alias='Arm Left')
+    Head_1: Head = Field(..., alias='Head')
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_snake_case_field.py 0.34.0-1/tests/data/expected/main/json/json_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/json/json_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/json_snake_case_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  snake_case.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    snake_case: str
+    camel_case: str = Field(..., alias='camelCase')
+    kebab_case: str = Field(..., alias='kebab-case')
+    pascal_case: str = Field(..., alias='PascalCase')
+    upper_case: str = Field(..., alias='UPPER_CASE')
+    dev_info: str = Field(..., alias='Dev_Info')
+    clone_device: str = Field(..., alias='CLONE_Device')
diff -pruN 0.26.4-3/tests/data/expected/main/json/simple_json_snake_case_field.py 0.34.0-1/tests/data/expected/main/json/simple_json_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/json/simple_json_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/simple_json_snake_case_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  simple.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    pet_name: str = Field(..., alias='petName')
diff -pruN 0.26.4-3/tests/data/expected/main/json/space_and_special_characters.py 0.34.0-1/tests/data/expected/main/json/space_and_special_characters.py
--- 0.26.4-3/tests/data/expected/main/json/space_and_special_characters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/space_and_special_characters.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class InitialParameters(BaseModel):
+    V1: int
+    V2: int
+
+
+class Data(BaseModel):
+    Length__m_: float = Field(..., alias='Length (m)')
+    Symmetric_deviation____: float = Field(..., alias='Symmetric deviation (%)')
+    Total_running_time__s_: int = Field(..., alias='Total running time (s)')
+    Mass__kg_: float = Field(..., alias='Mass (kg)')
+    Initial_parameters: InitialParameters = Field(..., alias='Initial parameters')
+    class_: str = Field(..., alias='class')
+
+
+class Values(BaseModel):
+    field_1_Step: str = Field(..., alias='1 Step')
+    field_2_Step: str = Field(..., alias='2 Step')
+
+
+class Recursive1(BaseModel):
+    value: float
+
+
+class Sub(BaseModel):
+    recursive: Recursive1
+
+
+class Recursive(BaseModel):
+    sub: Sub
+
+
+class Model(BaseModel):
+    Serial_Number: str = Field(..., alias='Serial Number')
+    Timestamp: str
+    Data: Data
+    values: Values
+    recursive: Recursive
diff -pruN 0.26.4-3/tests/data/expected/main/json/typed_dict_space_and_special_characters.py 0.34.0-1/tests/data/expected/main/json/typed_dict_space_and_special_characters.py
--- 0.26.4-3/tests/data/expected/main/json/typed_dict_space_and_special_characters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/json/typed_dict_space_and_special_characters.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,58 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+
+class InitialParameters(TypedDict):
+    V1: int
+    V2: int
+
+
+Data = TypedDict(
+    'Data',
+    {
+        'Length (m)': float,
+        'Symmetric deviation (%)': float,
+        'Total running time (s)': int,
+        'Mass (kg)': float,
+        'Initial parameters': InitialParameters,
+        'class': str,
+    },
+)
+
+
+Values = TypedDict(
+    'Values',
+    {
+        '1 Step': str,
+        '2 Step': str,
+    },
+)
+
+
+class Recursive1(TypedDict):
+    value: float
+
+
+class Sub(TypedDict):
+    recursive: Recursive1
+
+
+class Recursive(TypedDict):
+    sub: Sub
+
+
+Model = TypedDict(
+    'Model',
+    {
+        'Serial Number': str,
+        'Timestamp': str,
+        'Data': Data,
+        'values': Values,
+        'recursive': Recursive,
+    },
+)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_any_of
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/direct.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/direct.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/direct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/direct.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  direct.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Target1(BaseModel):
+    first: str
+
+
+class Target2(BaseModel):
+    second: str
+
+
+class Target3(BaseModel):
+    third: str
+
+
+class Target4(Target1, Target3):
+    pass
+
+
+class Target5(Target2, Target3):
+    pass
+
+
+class Target(BaseModel):
+    __root__: Union[Target4, Target5]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/reference.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_any_of/reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class First(BaseModel):
+    first: str
+
+
+class Second(BaseModel):
+    second: str
+
+
+class Target(BaseModel):
+    third: str
+
+
+class Target8(First, Target):
+    pass
+
+
+class Target9(Second, Target):
+    pass
+
+
+class Target6(BaseModel):
+    __root__: Union[Target8, Target9]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_one_of
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/direct.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/direct.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/direct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/direct.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  direct.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Target1(BaseModel):
+    first: str
+
+
+class Target2(BaseModel):
+    second: str
+
+
+class Target3(BaseModel):
+    third: str
+
+
+class Target4(Target1, Target3):
+    pass
+
+
+class Target5(Target2, Target3):
+    pass
+
+
+class Target(BaseModel):
+    __root__: Union[Target4, Target5]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/reference.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_one_of/reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class First(BaseModel):
+    first: str
+
+
+class Second(BaseModel):
+    second: str
+
+
+class Target(BaseModel):
+    third: str
+
+
+class Target8(First, Target):
+    pass
+
+
+class Target9(Second, Target):
+    pass
+
+
+class Target6(BaseModel):
+    __root__: Union[Target8, Target9]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_ref.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class First(BaseModel):
+    second: str = Field(..., description='Second', examples=['second'])
+
+
+class Test(First):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_self.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_ref_self.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_self.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_ref_self.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_ref_self.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Version(BaseModel):
+    __root__: None
+
+
+class Model(BaseModel):
+    version: Optional[Version] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_use_default.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_use_default.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_use_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_use_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_default.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Item(BaseModel):
+    test: Optional[str] = 'test123'
+    testarray: Optional[List[str]] = Field(['test123'], min_items=1, title='test array')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_with_object.py 0.34.0-1/tests/data/expected/main/jsonschema/all_of_with_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_with_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/all_of_with_object.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_with_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Home(BaseModel):
+    address: Optional[str] = None
+    zip: Optional[str] = None
+
+
+class Kind(BaseModel):
+    description: Optional[str] = None
+
+
+class Id(BaseModel):
+    id: Optional[int] = None
+
+
+class Pet(Home, Kind, Id):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/array_field_constraints.py 0.34.0-1/tests/data/expected/main/jsonschema/array_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/array_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/array_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  array_field_constraints.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field
+
+
+class Number(BaseModel):
+    __root__: str = Field(
+        ...,
+        description='Just a number',
+        examples=['1', '5464446', '684572369854259'],
+        regex='^\\d{1,15}$',
+    )
+
+
+class TestSchema(BaseModel):
+    numbers: List[Number] = Field(..., description='A list of numbers')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/array_in_additional_properties.py 0.34.0-1/tests/data/expected/main/jsonschema/array_in_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/array_in_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/array_in_additional_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  array_in_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List
+
+from pydantic import BaseModel
+
+
+class MyJsonOfListOfString(BaseModel):
+    __root__: Dict[str, List[str]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/autodetect.py 0.34.0-1/tests/data/expected/main/jsonschema/autodetect.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/autodetect.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/autodetect.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/boolean_property.py 0.34.0-1/tests/data/expected/main/jsonschema/boolean_property.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/boolean_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/boolean_property.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  boolean_property.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    field: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/circular_reference.py 0.34.0-1/tests/data/expected/main/jsonschema/circular_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/circular_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/circular_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  circular_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Member(BaseModel):
+    __root__: User = Field(..., title='Member')
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    pet: Optional[Animal] = None
+    home: Optional[House] = None
+
+
+class Animal(BaseModel):
+    name: Optional[str] = None
+    breeder: Optional[User] = None
+    home: Optional[House] = None
+
+
+class House(BaseModel):
+    address: Optional[str] = None
+    owner: Optional[User] = None
+
+
+Member.update_forward_refs()
+User.update_forward_refs()
+Animal.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object.py 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class MySchema1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+
+class MySchema1(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(RootModel[Union[MySchema1, MySchema2, MySchema3]]):
+    root: Union[MySchema1, MySchema2, MySchema3] = Field(
+        ..., title='My schema', union_mode='left_to_right'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+
+class MySchema1(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(RootModel[Union[MySchema1, MySchema2, MySchema3]]):
+    root: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_one_of_object.py 0.34.0-1/tests/data/expected/main/jsonschema/combine_one_of_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_one_of_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/combine_one_of_object.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  combine_one_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class MySchema1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combined_array.py 0.34.0-1/tests/data/expected/main/jsonschema/combined_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combined_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/combined_array.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,116 @@
+# generated by datamodel-codegen:
+#   filename:  combined_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet1(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Pet(BaseModel):
+    __root__: Union[List[Pet1], Pet1] = Field(..., title='Pet')
+
+
+class CombinedEnum1(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedEnumField(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedObjectField1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelf1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelf(BaseModel):
+    __root__: Union[List[CombinedSelf1], CombinedSelf1]
+
+
+class CombinedSelfEnum1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelfEnum2(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedSelfEnum(BaseModel):
+    __root__: Union[
+        List[Union[CombinedSelfEnum1, CombinedSelfEnum2]],
+        CombinedSelfEnum1,
+        CombinedSelfEnum2,
+    ]
+
+
+class CombinedSelfAllOf2(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class Kind(BaseModel):
+    description: Optional[str] = None
+
+
+class Id(BaseModel):
+    id: Optional[int] = None
+
+
+class CustomRootModel(BaseModel):
+    __root__: str
+
+
+class CombinedEnum(BaseModel):
+    __root__: Union[List[Kind], CombinedEnum1]
+
+
+class CombinedAllOf1(Kind, Id):
+    pass
+
+
+class CombinedAllOf(BaseModel):
+    __root__: Union[List[Kind], CombinedAllOf1]
+
+
+class CombinedAllOfField(Kind, Id):
+    pass
+
+
+class CombinedAllOfObjectField(Kind, Id):
+    color: Optional[str] = None
+
+
+class CombinedObjectField(BaseModel):
+    CombinedEnumField: Optional[Union[List[Kind], CombinedEnumField]] = None
+    CombinedAllOfField: Optional[Union[List[Kind], CombinedAllOfField]] = None
+    CombinedObjectField: Optional[Union[List[Kind], CombinedObjectField1]] = None
+    CombinedAllOfObjectField: Optional[
+        Union[List[Kind], CombinedAllOfObjectField]
+    ] = None
+
+
+class CombinedSelfAllOf1(Kind, Id):
+    color: Optional[str] = None
+
+
+class CombinedSelfAllOf(BaseModel):
+    __root__: Union[
+        List[Union[CombinedSelfAllOf1, CombinedSelfAllOf2]],
+        CombinedSelfAllOf1,
+        CombinedSelfAllOf2,
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complex_any_of.py 0.34.0-1/tests/data/expected/main/jsonschema/complex_any_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complex_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/complex_any_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  complex_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, Extra
+
+
+class Key(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    address: str
+    nat: str
+
+
+class ModelItem(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    key: Key
+    value: str
+
+
+class Model(BaseModel):
+    __root__: Union[int, List[ModelItem]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complex_one_of.py 0.34.0-1/tests/data/expected/main/jsonschema/complex_one_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complex_one_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/complex_one_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  complex_one_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, Extra
+
+
+class Key(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    address: str
+    nat: str
+
+
+class ModelItem(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    key: Key
+    value: str
+
+
+class Model(BaseModel):
+    __root__: Union[int, List[ModelItem]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member.py 0.34.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  complicated_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    processing_status: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py 0.34.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  complicated_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Optional
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+Kind = str
+
+
+@dataclass
+class ProcessingTask:
+    processing_status_union: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    processing_status: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_base_path.py 0.34.0-1/tests/data/expected/main/jsonschema/custom_base_path.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_base_path.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/custom_base_path.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  custom_base_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from custom.models import Animal, Person, Property
+
+
+class Spouse(Person):
+    job: Optional[str] = None
+
+
+class Pet(Animal):
+    name: Optional[str] = None
+
+
+class Child(Person):
+    school: Optional[str] = None
+    grade: Optional[float] = None
+    pets: Optional[List[Pet]] = None
+
+
+class Owner(Person):
+    job: Optional[str] = None
+    spouse: Optional[Spouse] = None
+    children: Optional[List[Child]] = None
+
+
+class House(Property):
+    address: str
+    owner: Optional[Owner] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_formatters.py 0.34.0-1/tests/data/expected/main/jsonschema/custom_formatters.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_formatters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/custom_formatters.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path.py 0.34.0-1/tests/data/expected/main/jsonschema/custom_type_path.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/custom_type_path.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom_type_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from custom import MultipleLineString, SpecialString, TitleString
+from custom.collection.array import Friends
+from custom.special import UpperString
+from custom.special.numbers import Age
+
+
+class Person(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    firstName: Optional[TitleString] = Field(
+        None, description="The person's first name."
+    )
+    lastName: Optional[UpperString] = Field(None, description="The person's last name.")
+    age: Optional[Age] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[Friends] = None
+    comment: Optional[MultipleLineString] = None
+
+
+class RootedCustomType(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    __root__: SpecialString
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom_type_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+from custom import MultipleLineString, SpecialString, TitleString
+from custom.collection.array import Friends
+from custom.special import UpperString
+from custom.special.numbers import Age
+
+
+class Person(BaseModel):
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )
+    firstName: Optional[TitleString] = Field(
+        None, description="The person's first name."
+    )
+    lastName: Optional[UpperString] = Field(None, description="The person's last name.")
+    age: Optional[Age] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[Friends] = None
+    comment: Optional[MultipleLineString] = None
+
+
+class RootedCustomType(RootModel[SpecialString]):
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )
+    root: SpecialString
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_const.py 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_const.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Literal
+
+
+@dataclass
+class Const:
+    foo: Literal['foo']
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field.py 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  user.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional
+
+Model = Any
+
+
+@dataclass
+class User:
+    name: Optional[str] = None
+    pets: List[User] = field(default_factory=list)
+
+
+@dataclass
+class Pet:
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_default.py 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_field_default.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/dataclass_field_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  user_default.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional
+
+Model = Any
+
+
+@dataclass
+class User:
+    name: Optional[str] = None
+    pets: Optional[List[User]] = field(default_factory=lambda: ['dog', 'cat'])
+
+
+@dataclass
+class Pet:
+    name: Optional[str] = 'dog'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field('a', title='Type ')
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field('b', title='Type ')
+
+
+class UnrelatedType(BaseModel):
+    info: Optional[str] = Field(
+        'Unrelated type, not involved in the discriminated union',
+        title='A way to check for side effects',
+    )
+
+
+class Response(BaseModel):
+    inner: Union[Type1, Type2] = Field(..., discriminator='type_', title='Inner')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Optional, Union
+
+from msgspec import Meta, Struct
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    type_: ClassVar[Annotated[Literal['a'], Meta(title='Type ')]] = 'a'
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    type_: ClassVar[Annotated[Literal['b'], Meta(title='Type ')]] = 'b'
+
+
+class UnrelatedType(Struct):
+    info: Optional[Annotated[str, Meta(title='A way to check for side effects')]] = (
+        'Unrelated type, not involved in the discriminated union'
+    )
+
+
+class Response(Struct):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Optional, Union
+
+from msgspec import Meta, Struct
+
+
+class Type1(Struct, kw_only=True, tag_field='type_', tag='a'):
+    type_: ClassVar[Annotated[Literal['a'], Meta(title='Type ')]] = 'a'
+
+
+class Type2(Struct, kw_only=True, tag_field='type_', tag='b'):
+    type_: ClassVar[Annotated[Literal['b'], Meta(title='Type ')]] = 'b'
+
+
+class UnrelatedType(Struct, kw_only=True):
+    info: Optional[Annotated[str, Meta(title='A way to check for side effects')]] = (
+        'Unrelated type, not involved in the discriminated union'
+    )
+
+
+class Response(Struct, kw_only=True):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Optional, Union
+
+from msgspec import Meta, Struct
+
+
+class Type1(Struct, omit_defaults=True, kw_only=True, tag_field='type_', tag='a'):
+    type_: ClassVar[Annotated[Literal['a'], Meta(title='Type ')]] = 'a'
+
+
+class Type2(Struct, omit_defaults=True, kw_only=True, tag_field='type_', tag='b'):
+    type_: ClassVar[Annotated[Literal['b'], Meta(title='Type ')]] = 'b'
+
+
+class UnrelatedType(Struct, omit_defaults=True, kw_only=True):
+    info: Optional[Annotated[str, Meta(title='A way to check for side effects')]] = (
+        'Unrelated type, not involved in the discriminated union'
+    )
+
+
+class Response(Struct, omit_defaults=True, kw_only=True):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_no_mapping.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_no_mapping.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_no_mapping.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_no_mapping.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_no_mapping.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field
+
+
+class Cat(BaseModel):
+    pet_type: Literal['cat']
+
+
+class Dog(BaseModel):
+    pet_type: Literal['dog']
+
+
+class Animal(BaseModel):
+    pet: Union[Cat, Dog] = Field(..., discriminator='pet_type', title='Pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field('a', title='Type ')
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field('b', title='Type ')
+    ref_type: Optional[Type1] = Field(None, description='A referenced type.')
+
+
+class Type4(BaseModel):
+    type_: Literal['d'] = Field('d', title='Type ')
+
+
+class Type5(BaseModel):
+    type_: Literal['e'] = Field('e', title='Type ')
+
+
+class Type3(BaseModel):
+    type_: Literal['c'] = Field('c', title='Type ')
+
+
+class Response(BaseModel):
+    inner: Union[Type1, Type2, Type3, Type4, Type5] = Field(
+        ..., discriminator='type_', title='Inner'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Optional, Union
+
+from msgspec import Meta, Struct
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    type_: ClassVar[Annotated[Literal['a'], Meta(title='Type ')]] = 'a'
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    type_: ClassVar[Annotated[Literal['b'], Meta(title='Type ')]] = 'b'
+    ref_type: Optional[Annotated[Type1, Meta(description='A referenced type.')]] = None
+
+
+class Type4(Struct, tag_field='type_', tag='d'):
+    type_: ClassVar[Annotated[Literal['d'], Meta(title='Type ')]] = 'd'
+
+
+class Type5(Struct, tag_field='type_', tag='e'):
+    type_: ClassVar[Annotated[Literal['e'], Meta(title='Type ')]] = 'e'
+
+
+class Type3(Struct, tag_field='type_', tag='c'):
+    type_: ClassVar[Annotated[Literal['c'], Meta(title='Type ')]] = 'c'
+
+
+class Response(Struct):
+    inner: Annotated[Union[Type1, Type2, Type3, Type4, Type5], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/artificial_folder/type-1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field
+
+from .. import type_4
+from ..subfolder import type_5
+from . import type_2
+from .artificial_folder import type_1
+
+
+class Type3(BaseModel):
+    type_: Literal['c'] = Field(..., const=True, title='Type ')
+
+
+class Response(BaseModel):
+    inner: Union[type_1.Type1, type_2.Type2, Type3, type_4.Type4, type_5.Type5] = Field(
+        ..., discriminator='type_', title='Inner'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/type-2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+
+from .artificial_folder import type_1
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field(..., const=True, title='Type ')
+    ref_type: Optional[type_1.Type1] = Field(None, description='A referenced type.')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  subfolder/type-5.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type5(BaseModel):
+    type_: Literal['e'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  type-4.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type4(BaseModel):
+    type_: Literal['d'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/artificial_folder/type-1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal
+
+from msgspec import Meta, Struct
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    type_: ClassVar[Annotated[Literal['a'], Meta(title='Type ')]] = 'a'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Union
+
+from msgspec import Meta, Struct
+
+from .. import type_4
+from ..subfolder import type_5
+from . import type_2
+from .artificial_folder import type_1
+
+
+class Type3(Struct, tag_field='type_', tag='c'):
+    type_: ClassVar[Annotated[Literal['c'], Meta(title='Type ')]] = 'c'
+
+
+class Response(Struct):
+    inner: Annotated[
+        Union[type_1.Type1, type_2.Type2, Type3, type_4.Type4, type_5.Type5],
+        Meta(title='Inner'),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/type-2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal, Optional
+
+from msgspec import Meta, Struct
+
+from .artificial_folder import type_1
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    type_: ClassVar[Annotated[Literal['b'], Meta(title='Type ')]] = 'b'
+    ref_type: Optional[
+        Annotated[type_1.Type1, Meta(description='A referenced type.')]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  subfolder/type-5.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal
+
+from msgspec import Meta, Struct
+
+
+class Type5(Struct, tag_field='type_', tag='e'):
+    type_: ClassVar[Annotated[Literal['e'], Meta(title='Type ')]] = 'e'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  type-4.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, ClassVar, Literal
+
+from msgspec import Meta, Struct
+
+
+class Type4(Struct, tag_field='type_', tag='d'):
+    type_: ClassVar[Annotated[Literal['d'], Meta(title='Type ')]] = 'd'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import RootModel
+
+
+class Model(RootModel[Any]):
+    root: Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field, constr
+
+
+class Test(BaseModel):
+    uid: constr(pattern=r'[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}') = Field(
+        ..., description='ulid of this object'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any
+
+from msgspec import Meta
+
+Model = Any
+
+
+Ulid = Annotated[str, Meta(pattern='[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from msgspec import Meta, Struct
+
+from . import common
+
+
+class Test(Struct):
+    uid: Annotated[common.Ulid, Meta(description='ulid of this object')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+Model = Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import Annotated, Meta, Struct
+
+
+class Test(Struct):
+    uid: Annotated[
+        str,
+        Meta(
+            description='ulid of this object',
+            pattern='[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}',
+        ),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_name
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/bar.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Bar(BaseModel):
+    pass
+
+
+class LogLevels(BaseModel):
+    __root__: str = Field(..., description='Supported logging levels')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/foo.py 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duplicate_name/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    pass
+
+
+class LogLevels(BaseModel):
+    __root__: str = Field(..., description='Supported logging levels')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duration_msgspec.py 0.34.0-1/tests/data/expected/main/jsonschema/duration_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duration_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duration_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  duration.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any, Optional
+
+from msgspec import Struct
+
+Model = Any
+
+
+class Test(Struct):
+    s_duration: Optional[timedelta] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duration_pydantic_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/duration_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duration_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/duration_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  duration.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class Model(RootModel[Any]):
+    root: Any
+
+
+class Test(BaseModel):
+    s_duration: Optional[timedelta] = Field(None, examples=['PT2H33M3S'])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_definitions.py 0.34.0-1/tests/data/expected/main/jsonschema/external_definitions.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_definitions.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/external_definitions.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  external_definitions_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Extra, constr
+
+
+class ElegantName(BaseModel):
+    __root__: constr(min_length=3)
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: ElegantName
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_files.py 0.34.0-1/tests/data/expected/main/jsonschema/external_files.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_files.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/external_files.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  external_parent_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ExternalChildRoot(BaseModel):
+    somefield: Optional[int] = None
+
+
+class Object(BaseModel):
+    metadata: ExternalChildRoot
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_files_in_directory.py 0.34.0-1/tests/data/expected/main/jsonschema/external_files_in_directory.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_files_in_directory.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/external_files_in_directory.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,77 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Extra, Field, conint
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+    fur: Optional[Fur] = None
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: Optional[str] = Field(None, example='(555) 555-1234')
+    food: Optional[List[Union[Noodle, Soup]]] = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(None, description='Age in years.')
+    pets: Optional[List[Pet]] = None
+    friends: Optional[Friends] = None
+    robot: Optional[Robot] = None
+    comment: None = None
+    drink: Optional[List[Union[Coffee, Tea]]] = None
+    food: Optional[List[Union[Noodle, Soup]]] = None
+
+
+class Robot(Pet):
+    friends: Optional[Person] = None
+    drink: Optional[Coffee] = None
+    food: Optional[Noodle] = None
+    pet: Optional[Pet] = None
+
+
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_other_ref2.py 0.34.0-1/tests/data/expected/main/jsonschema/external_other_ref2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_other_ref2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/external_other_ref2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  other/ref2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Other(BaseModel):
+    key: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_ref0.py 0.34.0-1/tests/data/expected/main/jsonschema/external_ref0.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_ref0.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/external_ref0.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  ref0.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import ref2
+from .other import ref2 as ref2_1
+
+
+class Model(BaseModel):
+    ref1: Optional[ref2.Model] = None
+    other_ref1: Optional[ref2_1.Other] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_allow.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_allow.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_allow.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_forbid.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_forbid.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_forbid.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_forbid.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_ignore.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_ignore.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_ignore.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_ignore.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(None, description='normal key', example='example')
+    age: Optional[int] = Field(None, example=12, examples=[13, 20])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        example='example',
+        invalid_key_1='abc',
+        key2=456,
+        repr=True,
+    )
+    age: Optional[int] = Field(None, example=12, examples=[13, 20])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        examples=['example'],
+        json_schema_extra={'key2': 456, 'invalid-key-1': 'abc'},
+        repr=True,
+    )
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        example='example',
+        field_comment='comment',
+        field_exclude=123,
+        field_invalid_key_2='efg',
+        invalid_key_1='abc',
+        key1=123,
+        key2=456,
+        readOnly=True,
+        register_='hij',
+        repr=True,
+        schema_='klm',
+        x_abc=True,
+    )
+    age: Optional[int] = Field(None, example=12, examples=[13, 20], writeOnly=True)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        examples=['example'],
+        json_schema_extra={
+            'key1': 123,
+            'key2': 456,
+            '$exclude': 123,
+            'invalid-key-1': 'abc',
+            '-invalid+key_2': 'efg',
+            '$comment': 'comment',
+            'register': 'hij',
+            'schema': 'klm',
+            'x-abc': True,
+            'readOnly': True,
+        },
+        repr=True,
+    )
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12, 'writeOnly': True}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_extras_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(None, description='normal key', examples=['example'])
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name.py 0.34.0-1/tests/data/expected/main/jsonschema/field_has_same_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_has_same_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  field_has_same_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class TestObject(BaseModel):
+    test_string: Optional[str] = None
+
+
+class Test(BaseModel):
+    TestObject: Optional[TestObject] = Field(None, title='TestObject')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/field_has_same_name_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/field_has_same_name_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  field_has_same_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class TestObject(BaseModel):
+    test_string: Optional[str] = None
+
+
+class Test(BaseModel):
+    TestObject_1: Optional[TestObject] = Field(
+        None, alias='TestObject', title='TestObject'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  forwarding_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/commons.py 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/commons.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/commons.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/commons.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  commons.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  forwarding.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class ForwardingArray(BaseModel):
+    __root__: List
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/forwarding_reference/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+from . import forwarding
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: forwarding.ForwardingArray
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/general.py 0.34.0-1/tests/data/expected/main/jsonschema/general.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py 0.34.0-1/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  simple_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass
+class Model:
+    s: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/has_default_value.py 0.34.0-1/tests/data/expected/main/jsonschema/has_default_value.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/has_default_value.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/has_default_value.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  has_default_value.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class TeamType(Enum):
+    Department = 'Department'
+    Division = 'Division'
+    BusinessUnit = 'BusinessUnit'
+    Organization = 'Organization'
+
+
+class ID(BaseModel):
+    __root__: str
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Family(BaseModel):
+    __root__: List[ID]
+
+
+class FamilyPets(BaseModel):
+    __root__: List[Pet]
+
+
+class Person(BaseModel):
+    id: Optional[ID] = Field(default_factory=lambda: ID.parse_obj('abc'))
+    user: Optional[Pet] = None
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    team: Optional[TeamType] = 'Department'
+    anotherTeam: Optional[TeamType] = 'Department'
+    Family: Optional[Family] = None
+    FamilyPets: Optional[FamilyPets] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/id.py 0.34.0-1/tests/data/expected/main/jsonschema/id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/id.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  id.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street_address: str
+    city: str
+    state: str
+
+
+class Model(BaseModel):
+    billing_address: Optional[Address] = None
+    shipping_address: Optional[Address] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/id_stdin.py 0.34.0-1/tests/data/expected/main/jsonschema/id_stdin.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/id_stdin.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/id_stdin.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street_address: str
+    city: str
+    state: str
+
+
+class Model(BaseModel):
+    billing_address: Optional[Address] = None
+    shipping_address: Optional[Address] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/ContactPoint.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/ContactPoint.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/ContactPoint.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/ContactPoint.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, EmailStr
+
+from . import type as type_1
+
+
+class Schema(BaseModel):
+    type: type_1.Schema
+    contactType: Optional[str] = None
+    email: EmailStr
+    telephone: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/URI.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/URI.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/URI.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/URI.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: AnyUrl = Field(..., description='String representing a URI.', title='URI')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import URI, ContactPoint
+from . import id as id_1
+from . import name as name_1
+from . import sameAs as sameAs_1
+from . import type as type_1
+
+
+class Organization(BaseModel):
+    id: Optional[id_1.Schema] = None
+    type: type_1.Schema
+    name: name_1.Schema
+    contactPoint: Optional[ContactPoint.Schema] = None
+    sameAs: Optional[sameAs_1.Schema] = None
+    url: Optional[URI.Schema] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/id.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/id.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(
+        ..., description='Identifier string of this object.', title='id'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/name.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(
+        ...,
+        description="A descriptive (full) name of the entity. For example, a dataset called 'Snow depth in the Northern Hemisphere' or a person called 'Sarah L. Jones' or a place called 'The Empire States Building'. Use unique names for distinct entities whenever possible.",
+        title='name',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/sameAs.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/sameAs.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/sameAs.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/sameAs.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import URI
+
+
+class Schema(BaseModel):
+    __root__: URI.Schema = Field(
+        ...,
+        description='Use the sameAs property to indicate the most canonical URLs for the original in cases of the entity. For example this may be a link to the original metadata of a dataset, definition of a property, Person, Organization or Place.',
+        title='sameAs',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/type.py 0.34.0-1/tests/data/expected/main/jsonschema/ids/type.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/ids/type.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(..., description='Type of this object.', title='type')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  imports_correct
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import type_1
+
+
+class Response(BaseModel):
+    inner: type_1.Type1 = Field(..., discriminator='type_', title='Inner')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/type_1.py 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/imports_correct/type_1.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  type_1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    a = 'a'
+    A = 'A'
+
+
+class Type1(BaseModel):
+    type_: Literal['a', 'A'] = Field(..., title='Type')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref.py 0.34.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance_forward_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class DogBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+    woof: Optional[bool] = Field(True, title='Woof')
+
+
+class PersonBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+
+
+class PersonsBestFriend(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+    dogs: Optional[List[Dog]] = Field(None, title='Dogs')
+    dog_base: Optional[DogBase] = None
+    dog_relationships: Optional[DogRelationships] = None
+    person_base: Optional[PersonBase] = None
+    person_relationships: Optional[PersonRelationships] = None
+
+
+class DogRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class PersonRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class Dog(DogBase, DogRelationships):
+    pass
+
+
+class Person(PersonBase, PersonRelationships):
+    pass
+
+
+PersonsBestFriend.update_forward_refs()
+DogRelationships.update_forward_refs()
+PersonRelationships.update_forward_refs()
+Dog.update_forward_refs()
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py 0.34.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance_forward_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class DogBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+    woof: Optional[bool] = Field(True, title='Woof')
+
+
+class DogRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class PersonBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+
+
+class PersonRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class PersonsBestFriend(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+    dogs: Optional[List[Dog]] = Field(None, title='Dogs')
+    dog_base: Optional[DogBase] = None
+    dog_relationships: Optional[DogRelationships] = None
+    person_base: Optional[PersonBase] = None
+    person_relationships: Optional[PersonRelationships] = None
+
+
+class Dog(DogBase, DogRelationships):
+    pass
+
+
+class Person(PersonBase, PersonRelationships):
+    pass
+
+
+DogRelationships.update_forward_refs()
+PersonRelationships.update_forward_refs()
+PersonsBestFriend.update_forward_refs()
+Dog.update_forward_refs()
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_enum_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_enum_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_enum_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InvalidEnum(Enum):
+    field_1_value = '1 value'
+    field_space = ' space'
+    field___special = '*- special'
+    schema = 'schema'
+    MRO = 'MRO'
+    mro_ = 'mro'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_enum_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InvalidEnum(Enum):
+    field_1_value = '1 value'
+    field_space = ' space'
+    field___special = '*- special'
+    schema = 'schema'
+    mro_1 = 'MRO'
+    mro_ = 'mro'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_import_name
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  array-commons.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import Field, RootModel
+
+
+class Commons(RootModel[Any]):
+    root: Any = Field(..., description='Commons objects', title='Commons')
+
+
+class DefaultArray(RootModel[List]):
+    root: List = Field(..., max_length=100, min_length=1)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  products.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Field, RootModel
+
+from . import array_commons_schema
+
+
+class Products(RootModel[array_commons_schema.DefaultArray]):
+    root: array_commons_schema.DefaultArray = Field(
+        ..., description='The products in the catalog', title='Products'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_model_name.py 0.34.0-1/tests/data/expected/main/jsonschema/invalid_model_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_model_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/invalid_model_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_model_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class ValidModelName(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/items_boolean.py 0.34.0-1/tests/data/expected/main/jsonschema/items_boolean.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/items_boolean.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/items_boolean.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  items_boolean.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    example: Optional[List] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py 0.34.0-1/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  many_case_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Model(Enum):
+    SNAKE_CASE = 'snake_case'
+    CAP_CASE = 'CAP_CASE'
+    CAMEL_CASE = 'CamelCase'
+    UPPERCASE = 'UPPERCASE'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer.py 0.34.0-1/tests/data/expected/main/jsonschema/json_pointer.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/json_pointer.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    hunts: bool
+    age: str
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    bark: bool
+    breed: str
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[Cat, Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer_array.py 0.34.0-1/tests/data/expected/main/jsonschema/json_pointer_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/json_pointer_array.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, EmailStr
+
+
+class Email(BaseModel):
+    email: EmailStr
+
+
+class Error(BaseModel):
+    code: str
+
+
+class Model1(BaseModel):
+    emails: List[Email]
+
+
+class Model2(BaseModel):
+    errors: List[Error]
+
+
+class Model(BaseModel):
+    __root__: Union[Model1, Model2]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/json_reuse_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/json_reuse_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Animal(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class RedistributeEnum(Enum):
+    static = 'static'
+    connected = 'connected'
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    animal: Optional[Animal] = 'dog'
+    pet: Optional[Animal] = 'cat'
+    redistribute: Optional[List[RedistributeEnum]] = None
+
+
+class Redistribute(BaseModel):
+    __root__: List[RedistributeEnum] = Field(
+        ..., description='Redistribute type for routes.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py 0.34.0-1/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Animal(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class RedistributeEnum(Enum):
+    static = 'static'
+    connected = 'connected'
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    animal: Optional[Animal] = Animal.dog
+    pet: Optional[Animal] = Animal.cat
+    redistribute: Optional[List[RedistributeEnum]] = None
+
+
+class Redistribute(BaseModel):
+    __root__: List[RedistributeEnum] = Field(
+        ..., description='Redistribute type for routes.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/long_description.py 0.34.0-1/tests/data/expected/main/jsonschema/long_description.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/long_description.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/long_description.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  long_description.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class LongDescription(BaseModel):
+    summary: Optional[str] = Field(None, description='summary for object')
+    description: Optional[str] = Field(
+        None,
+        description='datamodel-code-generator. This code generator creates pydantic model from an openapi file and others.',
+    )
+    multi_line: Optional[str] = Field(
+        None,
+        description='datamodel-code-generator\nThis code generator creates pydantic model from an openapi file and others.\n\n\nSupported source types\nOpenAPI 3 (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema Validation)\nJSON/YAML/CSV Data (it will be converted to JSON Schema)\nPython dictionary (it will be converted to JSON Schema)',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py 0.34.0-1/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  long_description.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class LongDescription(BaseModel):
+    summary: Optional[str] = Field(None, description='summary for object')
+    description: Optional[str] = Field(
+        None,
+        description=(
+            'datamodel-code-generator. This code generator creates pydantic model from'
+            ' an openapi file and others.'
+        ),
+    )
+    multi_line: Optional[str] = Field(
+        None,
+        description=(
+            'datamodel-code-generator\nThis code generator creates pydantic model from'
+            ' an openapi file and others.\n\n\nSupported source types\nOpenAPI 3'
+            ' (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema'
+            ' Validation)\nJSON/YAML/CSV Data (it will be converted to JSON'
+            ' Schema)\nPython dictionary (it will be converted to JSON Schema)'
+        ),
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/bar.py 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is Enabled.')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/foo.py 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is enabled')
+    resources: Optional[List[str]] = Field(
+        None, description='Resource full classname to register to extend any endpoints.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/union.py 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/union.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/main_root_one_of/union.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  union.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+from . import bar, foo
+
+
+class ExecutionContext(BaseModel):
+    __root__: Union[foo.JobRun, bar.JobRun] = Field(
+        ..., description='Execution Configuration.'
+    )
+
+
+class App(BaseModel):
+    runtime: Optional[ExecutionContext] = Field(
+        None, description='Execution Configuration.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular_default_enum_member
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    pass
+
+
+class LogLevels(Enum):
+    DEBUG = 'DEBUG'
+    INFO = 'INFO'
+    WARN = 'WARN'
+    ERROR = 'ERROR'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import bar
+from .nested_bar import bar as bar_1
+
+
+class Foo(BaseModel):
+    loggerLevel: Optional[bar.LogLevels] = bar.LogLevels.INFO
+    AnotherLoggerLevel: Optional[bar_1.LogLevels] = bar_1.LogLevels.ERROR
+    OtherLoggerLevels: Optional[List[bar_1.LogLevels]] = [
+        bar_1.LogLevels.INFO,
+        bar_1.LogLevels.ERROR,
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_a.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_a.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_b
+
+
+class ModelA(BaseModel):
+    firstName: Optional[str] = None
+    modelB: Optional[file_b.ModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_b.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_b.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  file_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class ModelB(BaseModel):
+    metadata: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_c.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_c.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_c.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_c.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_b
+
+
+class ModelC(BaseModel):
+    firstName: Optional[str] = None
+    modelB: Optional[file_b.ModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_d.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_d.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_d.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files/file_d.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_d.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_a
+
+
+class ModelD(BaseModel):
+    firstName: Optional[str] = None
+    modelA: Optional[file_a.ModelA] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files_json_pointer
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  file_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import file_b
+
+
+class PersonA(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[file_b.Cat, file_b.Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  file_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Extra
+
+
+class Model(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    hunts: bool
+    age: str
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    bark: bool
+    breed: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  file_c.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import file_b
+
+
+class PersonC(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[file_b.Cat, file_b.Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files_self_ref
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  base_test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    pass
+
+
+class Second(BaseModel):
+    __root__: str
+
+
+class First(BaseModel):
+    __root__: Second
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import base_test
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: base_test.First
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Second(BaseModel):
+    __root__: str
+
+
+class First(BaseModel):
+    __root__: Second
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: First
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_all_of.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_all_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_all_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_all_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_all_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    first: str
+    second: str
+    third: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .empty_parent.nested import deep as deep_1
+from .nested import deep
+
+
+class NestedPerson(BaseModel):
+    nested_deep_childJson: Optional[deep.Json] = None
+    nested_deep_childAnother: Optional[deep.Another] = None
+    empty_parent_nested_deep_childJson: Optional[deep_1.Json] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Json(BaseModel):
+    firstName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Json(BaseModel):
+    firstName: Optional[str] = None
+
+
+class Another(BaseModel):
+    firstName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/coffee.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/tea.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/food.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/friends.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import food as food_1
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: Optional[str] = Field(None, example='(555) 555-1234')
+    food: Optional[List[Union[food_1.Noodle, food_1.Soup]]] = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/machine/robot.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from ... import person
+from .. import food as food_1
+from ..drink import coffee
+from ..relative.animal.pet import pet as pet_1
+from ..relative.animal.pet.pet import Pet
+
+
+class Robot(Pet):
+    friends: Optional[person.Person] = None
+    drink: Optional[coffee.Coffee] = None
+    food: Optional[food_1.Noodle] = None
+    pet: Optional[pet_1.Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/fur.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/pet/pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import fur as fur_1
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+    fur: Optional[fur_1.Fur] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/person.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/person.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/person.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_directory/person.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field, conint
+
+from .definitions import food as food_1
+from .definitions import friends as friends_1
+from .definitions.drink import coffee, tea
+from .definitions.machine import robot as robot_1
+from .definitions.relative.animal.pet import pet
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(None, description='Age in years.')
+    pets: Optional[List[pet.Pet]] = None
+    friends: Optional[friends_1.Friends] = None
+    robot: Optional[robot_1.Robot] = None
+    comment: None = None
+    drink: Optional[List[Union[coffee.Coffee, tea.Tea]]] = None
+    food: Optional[List[Union[food_1.Noodle, food_1.Soup]]] = None
+
+
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_json_pointer.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_json_pointer.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_json_pointer.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,68 @@
+# generated by datamodel-codegen:
+#   filename:  nested_json_pointer.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class CatBreed(BaseModel):
+    __root__: Any
+
+
+class DogBreed(BaseModel):
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class C1(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class C2(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class D1(BaseModel):
+    bark: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class D2(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    breed: Optional[Union[C1, C2]] = Field(None, title='breed')
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    breed: Union[D1, D2] = Field(..., title='breed')
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[Cat, Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class NestedSkip(BaseModel):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class E(BaseModel):
+    example1: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null.py 0.34.0-1/tests/data/expected/main/jsonschema/null.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/null.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  null.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    null: None = None
+    nullableString: Optional[str] = None
+    nullableNumber: Optional[Union[float, int]] = None
+    any: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array.py 0.34.0-1/tests/data/expected/main/jsonschema/null_and_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/null_and_array.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  null_and_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class MyObjItem(BaseModel):
+    items: Optional[List[Any]]
+
+
+class Model(BaseModel):
+    my_obj: List[MyObjItem]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array_v2.py 0.34.0-1/tests/data/expected/main/jsonschema/null_and_array_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/null_and_array_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  null_and_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class MyObjItem(BaseModel):
+    items: Optional[List[Any]] = None
+
+
+class Model(BaseModel):
+    my_obj: List[MyObjItem]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of.py 0.34.0-1/tests/data/expected/main/jsonschema/nullable_any_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nullable_any_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Config(BaseModel):
+    __root__: str = Field(..., description='d2', min_length=1, title='t2')
+
+
+class In(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    input_dataset_path: Optional[str] = Field(
+        None, description='d1', min_length=1, title='Path to the input dataset'
+    )
+    config: Optional[Config] = None
+
+
+class ValidatingSchemaId1(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    in_: Optional[In] = Field(None, alias='in')
+    n1: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py 0.34.0-1/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Config(BaseModel):
+    __root__: str = Field(..., description='d2', min_length=1, title='t2')
+
+
+class In(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    input_dataset_path: str | None = Field(
+        None, description='d1', min_length=1, title='Path to the input dataset'
+    )
+    config: Config | None = None
+
+
+class ValidatingSchemaId1(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    in_: In | None = Field(None, alias='in')
+    n1: int | None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_object.py 0.34.0-1/tests/data/expected/main/jsonschema/nullable_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/nullable_object.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel, Extra, constr
+
+
+class Network(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    networks: Dict[constr(regex=r'^[a-zA-Z0-9._-]+$'), Optional[Network]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/object_has_one_of.py 0.34.0-1/tests/data/expected/main/jsonschema/object_has_one_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/object_has_one_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/object_has_one_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  object_has_one_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Field1(Enum):
+    response_1 = 'response_1'
+
+
+class Field2(Enum):
+    response_a = 'response_a'
+
+
+class V2Test1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field1
+    field_2: Field2
+
+
+class Field11(Enum):
+    response_2 = 'response_2'
+
+
+class Field21(Enum):
+    response_b = 'response_b'
+
+
+class V2Test2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field11
+    field_2: Field21
+
+
+class Field22(Enum):
+    response_c = 'response_c'
+
+
+class V2Test3(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field11
+    field_2: Field22
+
+
+class V2Test(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[V2Test1, Union[V2Test2, V2Test3]] = Field(..., title='v2_test')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py 0.34.0-1/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  one_of_with_sub_schema_array_item.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class SpatialPlan(BaseModel):
+    officialDocument: Optional[Union[str, List[AnyUrl]]] = Field(
+        None,
+        description='Link to the official documents that relate to the spatial plan.',
+        title='officialDocument',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern.py 0.34.0-1/tests/data/expected/main/jsonschema/pattern.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/pattern.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            regex=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'
+        )
+    ] = None
+    arn: Optional[
+        constr(regex=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(regex=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties.py 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel, constr
+
+
+class Bar(BaseModel):
+    name: Optional[str] = None
+
+
+class Foo(BaseModel):
+    bar: Dict[constr(regex=r'^([a-zA-Z_][a-zA-Z0-9_]*)$'), Bar]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties_by_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+
+from pydantic import BaseModel, Extra, Field, constr
+
+
+class Stt(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    timeout: Optional[float] = Field(None, title='STT Timeout')
+
+
+class TextResponse(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: Dict[constr(regex=r'^[a-z]{1}[0-9]{1}$'), Any]
+
+
+class SomeschemaSchema(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    KeyWithExplicitPatternProperties: Optional[
+        Dict[constr(regex=r'^[a-z]{1}[0-9]{1}$'), Any]
+    ] = None
+    KeyWithPatternPropertiesByReference: Optional[TextResponse] = None
+    SomeOtherBoringReference: Optional[Stt] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    name: Optional[str] = None
+
+
+class Foo(BaseModel):
+    bar: Dict[str, Bar]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/removed_parent_class.py 0.34.0-1/tests/data/expected/main/jsonschema/removed_parent_class.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/removed_parent_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/removed_parent_class.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[datetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/required.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field/required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from msgspec import Struct
+
+
+class Model(Struct):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, NaiveDatetime
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[NaiveDatetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import NaiveDatetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: NaiveDatetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[AwareDatetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AwareDatetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: AwareDatetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/required_and_any_of_required.py 0.34.0-1/tests/data/expected/main/jsonschema/required_and_any_of_required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/required_and_any_of_required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/required_and_any_of_required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  required_and_any_of_required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class Foo(BaseModel):
+    bar: int
+    baz: int
+    qux: Optional[int] = None
+
+
+class Foo1(BaseModel):
+    bar: int
+    baz: Optional[int] = None
+    qux: int
+
+
+class Model(BaseModel):
+    foo: Union[Foo, Foo1]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_id.py 0.34.0-1/tests/data/expected/main/jsonschema/root_id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_id.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  root_id.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
+
+
+class OriginalPerson(BaseModel):
+    __root__: Person
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    owner: Optional[Person] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_id_absolute_url.py 0.34.0-1/tests/data/expected/main/jsonschema/root_id_absolute_url.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_id_absolute_url.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_id_absolute_url.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  root_id_absolute_url.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
+
+
+class OriginalPerson(BaseModel):
+    __root__: Person
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    owner: Optional[Person] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_in_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/root_in_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_in_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_in_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  enum_in_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    order_reference: Optional[str] = Field(
+        None,
+        alias='orderReference',
+        description='Reference number of the order',
+        examples=['27378669'],
+    )
+    brand: Optional[Literal['OPUS', 'someday']] = Field(
+        None, description='purchased brand'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, NestedObjectResult]
+    nested_enum_result: Dict[str, NestedEnumResult]
+    all_of_result: Optional[Dict[str, AllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, User]] = None
+    objectRef: Optional[Dict[str, User]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class CustomNestedObjectResult(BaseModel):
+    status: int
+
+
+class CustomNestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class CustomOneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class CustomAnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class CustomUser(BaseModel):
+    name: Optional[str] = None
+
+
+class CustomAllOfResult(CustomUser):
+    description: Optional[str] = None
+
+
+class CustomModel(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, CustomNestedObjectResult]
+    nested_enum_result: Dict[str, CustomNestedEnumResult]
+    all_of_result: Optional[Dict[str, CustomAllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[CustomUser, CustomOneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[CustomUser, CustomAnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, CustomUser]] = None
+    objectRef: Optional[Dict[str, CustomUser]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, CustomUser]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, NestedObjectResult]
+    nested_enum_result: Dict[str, Literal['red', 'green']]
+    all_of_result: Optional[Dict[str, AllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, User]] = None
+    objectRef: Optional[Dict[str, User]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Mapping, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Mapping[str, int]
+    nested_object_result: Mapping[str, NestedObjectResult]
+    nested_enum_result: Mapping[str, NestedEnumResult]
+    all_of_result: Optional[Mapping[str, AllOfResult]] = None
+    one_of_result: Optional[Mapping[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Mapping[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Mapping[str, User]] = None
+    objectRef: Optional[Mapping[str, User]] = None
+    deepNestedObjectRef: Optional[Mapping[str, Mapping[str, Mapping[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: dict[str, int]
+    nested_object_result: dict[str, NestedObjectResult]
+    nested_enum_result: dict[str, NestedEnumResult]
+    all_of_result: Optional[dict[str, AllOfResult]] = None
+    one_of_result: Optional[dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[dict[str, User]] = None
+    objectRef: Optional[dict[str, User]] = None
+    deepNestedObjectRef: Optional[dict[str, dict[str, dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/same_name_objects.py 0.34.0-1/tests/data/expected/main/jsonschema/same_name_objects.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/same_name_objects.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/same_name_objects.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  same_name_objects.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import BaseModel, Extra
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Friends(BaseModel):
+    pass
+
+    class Config:
+        extra = Extra.forbid
+
+
+class FriendsModel(BaseModel):
+    __root__: List
+
+
+class Tst2(BaseModel):
+    __root__: FriendsModel
+
+
+class Tst1(BaseModel):
+    __root__: FriendsModel
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/self_reference.py 0.34.0-1/tests/data/expected/main/jsonschema/self_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/self_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/self_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  self_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    friends: Optional[List[Pet]] = None
+
+
+Pet.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/similar_nested_array.py 0.34.0-1/tests/data/expected/main/jsonschema/similar_nested_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/similar_nested_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/similar_nested_array.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  similar_nested_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Datum(BaseModel):
+    keyA: Optional[str] = None
+
+
+class ObjectA(BaseModel):
+    data: Optional[List[Datum]] = None
+
+
+class Datum1(BaseModel):
+    keyB: Optional[str] = None
+
+
+class ObjectB(BaseModel):
+    data: Optional[List[Datum1]] = None
+
+
+class KeyC(BaseModel):
+    nestedA: Optional[str] = None
+
+
+class KeyC1(BaseModel):
+    nestedB: Optional[str] = None
+
+
+class ObjectC(BaseModel):
+    keyC: Optional[Union[KeyC, KeyC1]] = None
+
+
+class KeyCItem(BaseModel):
+    nestedA: Optional[str] = None
+
+
+class KeyCItem1(BaseModel):
+    nestedB: Optional[str] = None
+
+
+class KeyCEnum(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class KeyCEnum1(Enum):
+    orange = 'orange'
+    apple = 'apple'
+    milk = 'milk'
+
+
+class ObjectD(BaseModel):
+    keyC: Optional[List[Union[KeyCItem, KeyCItem1, KeyCEnum, KeyCEnum1]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py 0.34.0-1/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  space_field_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class SpaceIF(Enum):
+    space_field = 'Space Field'
+
+
+class Model(BaseModel):
+    space_if: Optional[SpaceIF] = Field(None, alias='SpaceIF')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/special_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    field_ = ''
+    field__1 = '\n'
+    field__ = '\r\n'
+    field__2 = '\t'
+    field__3 = '\b'
+    field__4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    empty = ''
+    field_ = '\n'
+    field__ = '\r\n'
+    field__1 = '\t'
+    field__2 = '\b'
+    field__3 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    special_ = ''
+    special__1 = '\n'
+    special__ = '\r\n'
+    special__2 = '\t'
+    special__3 = '\b'
+    special__4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    _ = ''
+    __1 = '\n'
+    __ = '\r\n'
+    __2 = '\t'
+    __3 = '\b'
+    __4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_field_name.py 0.34.0-1/tests/data/expected/main/jsonschema/special_field_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_field_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  special_field_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class SpecialField(BaseModel):
+    global_: Optional[str] = Field(None, alias='global')
+    with_: Optional[str] = Field(None, alias='with')
+    class_: Optional[int] = Field(None, alias='class')
+    class_s: Optional[int] = Field(None, alias="class's")
+    class_s_1: Optional[str] = Field(None, alias='class-s')
+    field_: Optional[str] = Field(None, alias='#')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py 0.34.0-1/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  special_prefix_model.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Model(BaseModel):
+    id: AnyUrl = Field(..., alias='@id', title='Id must be presesnt and must be a URI')
+    type: str = Field(..., alias='@type')
+    type_1: Optional[str] = Field(None, alias='@+!type')
+    type_2: Optional[str] = Field(None, alias='@-!type')
+    profile: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types.py 0.34.0-1/tests/data/expected/main/jsonschema/strict_types.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/strict_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field, conbytes, confloat, conint, constr
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    age: Optional[int] = None
+    salary: Optional[conint(ge=0)] = None
+    debt: Optional[conint(le=0)] = None
+    loan: Optional[confloat(le=0.0)] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    height: Optional[confloat(ge=0.0)] = None
+    weight: Optional[confloat(ge=0.0)] = None
+    score: Optional[confloat(ge=1e-08)] = None
+    active: Optional[bool] = None
+    photo: Optional[conbytes(min_length=100)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all.py 0.34.0-1/tests/data/expected/main/jsonschema/strict_types_all.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/strict_types_all.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import (
+    BaseModel,
+    Field,
+    StrictBool,
+    StrictBytes,
+    StrictInt,
+    StrictStr,
+    confloat,
+    conint,
+    constr,
+)
+
+
+class User(BaseModel):
+    name: Optional[StrictStr] = Field(None, example='ken')
+    age: Optional[StrictInt] = None
+    salary: Optional[conint(ge=0, strict=True)] = None
+    debt: Optional[conint(le=0, strict=True)] = None
+    loan: Optional[confloat(le=0.0, strict=True)] = None
+    tel: Optional[
+        constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$', strict=True)
+    ] = None
+    height: Optional[confloat(ge=0.0, strict=True)] = None
+    weight: Optional[confloat(ge=0.0, strict=True)] = None
+    score: Optional[confloat(ge=1e-08, strict=True)] = None
+    active: Optional[StrictBool] = None
+    photo: Optional[StrictBytes] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py 0.34.0-1/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import (
+    BaseModel,
+    Field,
+    StrictBool,
+    StrictBytes,
+    StrictFloat,
+    StrictInt,
+    StrictStr,
+)
+
+
+class User(BaseModel):
+    name: Optional[StrictStr] = Field(None, example='ken')
+    age: Optional[StrictInt] = None
+    salary: Optional[StrictInt] = None
+    debt: Optional[StrictInt] = None
+    loan: Optional[StrictFloat] = None
+    tel: Optional[StrictStr] = None
+    height: Optional[StrictFloat] = None
+    weight: Optional[StrictFloat] = None
+    score: Optional[StrictFloat] = None
+    active: Optional[StrictBool] = None
+    photo: Optional[StrictBytes] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/string_dict.py 0.34.0-1/tests/data/expected/main/jsonschema/string_dict.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/string_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/string_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  string_dict.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict
+
+from pydantic import BaseModel
+
+
+class MyStringDict(BaseModel):
+    __root__: Dict[str, str]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/subclass_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/subclass_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/subclass_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/subclass_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class IntEnum(int, Enum):
+    integer_1 = 1
+    integer_2 = 2
+    integer_3 = 3
+
+
+class FloatEnum(float, Enum):
+    number_1_1 = 1.1
+    number_2_1 = 2.1
+    number_3_1 = 3.1
+
+
+class StrEnum(str, Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class NonTypedEnum(Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class BooleanEnum(Enum):
+    boolean_True = True
+    boolean_False = False
+
+
+class UnknownEnum(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Model(BaseModel):
+    IntEnum: Optional[IntEnum] = None
+    FloatEnum: Optional[FloatEnum] = None
+    StrEnum: Optional[StrEnum] = None
+    NonTypedEnum: Optional[NonTypedEnum] = None
+    BooleanEnum: Optional[BooleanEnum] = None
+    UnknownEnum: Optional[UnknownEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/titles.py 0.34.0-1/tests/data/expected/main/jsonschema/titles.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/titles.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/titles.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,57 @@
+# generated by datamodel-codegen:
+#   filename:  titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ExtendedProcessingTask1(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusUnion(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class ProcessingTasksTitle(BaseModel):
+    __root__: List[ProcessingTask] = Field(..., title='Processing Tasks Title')
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[ProcessingTasksTitle, ExtendedProcessingTask1] = Field(
+        ..., title='Extended Processing Task Title'
+    )
+
+
+class ExtendedProcessingTasks(BaseModel):
+    __root__: List[ExtendedProcessingTask] = Field(
+        ..., title='Extended Processing Tasks Title'
+    )
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusUnion, ExtendedProcessingTask, ProcessingStatus]
+    ] = Field('COMPLETED', title='Processing Status Union Title')
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+ProcessingTasksTitle.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/titles_use_title_as_name.py 0.34.0-1/tests/data/expected/main/jsonschema/titles_use_title_as_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/titles_use_title_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/titles_use_title_as_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,57 @@
+# generated by datamodel-codegen:
+#   filename:  titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatusTitle(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class NestedCommentTitle(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusDetail(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class ProcessingTasksTitle(BaseModel):
+    __root__: List[ProcessingTaskTitle] = Field(..., title='Processing Tasks Title')
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[ProcessingTasksTitle, NestedCommentTitle] = Field(
+        ..., title='Extended Processing Task Title'
+    )
+
+
+class ExtendedProcessingTasksTitle(BaseModel):
+    __root__: List[ExtendedProcessingTask] = Field(
+        ..., title='Extended Processing Tasks Title'
+    )
+
+
+class ProcessingTaskTitle(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusDetail, ExtendedProcessingTask, ProcessingStatusTitle]
+    ] = Field('COMPLETED', title='Processing Status Union Title')
+    processing_status: Optional[ProcessingStatusTitle] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+ProcessingTasksTitle.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.basic_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class BasicEnum(BaseModel):
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.input.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+from ... import schema
+
+
+class Input(BaseModel):
+    input: Optional[Any] = Field('input', title='Input')
+    extType: Optional[schema.ExtType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.output.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Output(BaseModel):
+    output: Optional[Any] = Field('output', title='Output')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ExtType(BaseModel):
+    ExtType: Optional[Any] = Field(None, title='ExtType')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.basic_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class BasicEnum(BaseModel):
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.input.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+from . import schema
+
+
+class Input(BaseModel):
+    input: Optional[Any] = Field('input', title='Input')
+    extType: Optional[schema.ExtType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.output.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Output(BaseModel):
+    output: Optional[Any] = Field('output', title='Output')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ExtType(BaseModel):
+    ExtType: Optional[Any] = Field(None, title='ExtType')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_const.py 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_const.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, TypedDict
+
+
+class Const(TypedDict):
+    foo: Literal['foo']
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  not_required_nullable.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, Optional, TypedDict
+
+
+class Person(TypedDict):
+    name: str
+    null_name: NotRequired[Optional[str]]
+    age: NotRequired[int]
+    null_age: Optional[int]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  special_field_name_with_inheritance_model.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+
+class NestedBase(TypedDict):
+    age: NotRequired[str]
+
+
+class Base(NestedBase):
+    name: NotRequired[str]
+
+
+SpecialField = TypedDict(
+    'SpecialField',
+    {
+        'age': NotRequired[str],
+        'name': NotRequired[str],
+        'global': NotRequired[str],
+        'with': NotRequired[str],
+        'class': NotRequired[int],
+        'class\'s': NotRequired[int],
+        'class-s': NotRequired[str],
+        '#': NotRequired[str],
+    },
+)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  string_dict.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict
+
+MyStringDict = Dict[str, str]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_default_with_const.py 0.34.0-1/tests/data/expected/main/jsonschema/use_default_with_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_default_with_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_default_with_const.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  use_default_with_const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+
+
+class UseDefaultWithConst(BaseModel):
+    foo: Literal['foo'] = 'foo'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/coffee.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/tea.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/food.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/friends.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Extra, Field
+
+from . import food as food_1
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: str | None = Field(None, example='(555) 555-1234')
+    food: List[food_1.Noodle | food_1.Soup] | None = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/machine/robot.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from ... import person
+from .. import food as food_1
+from ..drink import coffee
+from ..relative.animal.pet import pet as pet_1
+from ..relative.animal.pet.pet import Pet
+
+
+class Robot(Pet):
+    friends: person.Person | None = None
+    drink: coffee.Coffee | None = None
+    food: food_1.Noodle | None = None
+    pet: pet_1.Pet | None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/fur.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/pet/pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+from .. import fur as fur_1
+
+
+class Pet(BaseModel):
+    name: str | None = None
+    age: int | None = None
+    fur: fur_1.Fur | None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/person.py 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/person.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/person.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/use_union_operator/person.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field, conint
+
+from .definitions import food as food_1
+from .definitions import friends as friends_1
+from .definitions.drink import coffee, tea
+from .definitions.machine import robot as robot_1
+from .definitions.relative.animal.pet import pet
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: conint(ge=0) | None = Field(None, description='Age in years.')
+    pets: List[pet.Pet] | None = None
+    friends: friends_1.Friends | None = None
+    robot: robot_1.Robot | None = None
+    comment: None = None
+    drink: List[coffee.Coffee | tea.Tea] | None = None
+    food: List[food_1.Noodle | food_1.Soup] | None = None
+
+
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py 0.34.0-1/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  without_titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ExtendedProcessingTask1(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusUnion(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    __root__: List[ProcessingTask]
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[Model, ExtendedProcessingTask1]
+
+
+class ExtendedProcessingTasks(BaseModel):
+    __root__: List[ExtendedProcessingTask]
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusUnion, ExtendedProcessingTask, ProcessingStatus]
+    ] = 'COMPLETED'
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+Model.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py 0.34.0-1/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py
--- 0.26.4-3/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# MIT License
+# 
+# Copyright (c) 2023 Blah-blah
+# 
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/additional_properties.py 0.34.0-1/tests/data/expected/main/openapi/additional_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/additional_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,110 @@
+# generated by datamodel-codegen:
+#   filename:  additional_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    task_id: Optional[str] = Field(None, title='task id')
+    tags: Dict[str, List[str]] = Field(
+        ..., title='Dict of tags, each containing a list of file names'
+    )
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_of_with_relative_ref.py 0.34.0-1/tests/data/expected/main/openapi/all_of_with_relative_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_of_with_relative_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/all_of_with_relative_ref.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, RootModel
+
+
+class Animal(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    kind: Optional[Kind] = None
+    """
+    The kind of the animal
+    """
+
+
+class Animals(RootModel[Animal]):
+    root: Animal
+
+
+class Kind(Enum):
+    CAT = 'CAT'
+    DOG = 'DOG'
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    kind: Optional[Kind] = None
+    """
+    The kind of the pet
+    """
+
+
+class Pets(RootModel[Pet]):
+    root: Pet
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_required.py 0.34.0-1/tests/data/expected/main/openapi/allof_required.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/allof_required.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_required.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Foo(BaseModel):
+    a: str
+    b: str
+
+
+class Bar(Foo):
+    type: constr(regex=r'service')
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields.py 0.34.0-1/tests/data/expected/main/openapi/allow_extra_fields.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/allow_extra_fields.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Extra, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,87 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name.py 0.34.0-1/tests/data/expected/main/openapi/allow_population_by_field_name.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/allow_population_by_field_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/base_class.py 0.34.0-1/tests/data/expected/main/openapi/base_class.py
--- 0.26.4-3/tests/data/expected/main/openapi/base_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/base_class.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,71 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, Field
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/general.py 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_paths.py 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/only_paths.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_paths.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/only_paths.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters_remote_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models.py 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  not_real_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, constr
+
+
+class Tweet(BaseModel):
+    author_id: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[str]
+
+
+class FileRequest(BaseModel):
+    file_hash: constr(regex=r'^[a-fA-F\d]{32}$', min_length=32, max_length=32) = Field(
+        ..., description='For file'
+    )
+
+
+class ImageRequest(BaseModel):
+    image_hash: Optional[
+        constr(regex=r'^[a-fA-F\d]{32}$', min_length=32, max_length=32)
+    ] = Field(None, description='For image')
+
+
+class FileHashes(BaseModel):
+    __root__: List[constr(regex=r'^[a-fA-F\d]{32}$', min_length=32, max_length=32)]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  not_real_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Tweet(BaseModel):
+    author_id: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[str]
+
+
+class FileHash(BaseModel):
+    __root__: str = Field(
+        ...,
+        description='For file',
+        max_length=32,
+        min_length=32,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class FileRequest(BaseModel):
+    file_hash: str = Field(
+        ...,
+        description='For file',
+        max_length=32,
+        min_length=32,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class ImageRequest(BaseModel):
+    image_hash: Optional[str] = Field(
+        None,
+        description='For image',
+        max_length=64,
+        min_length=64,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class FileHashes(BaseModel):
+    __root__: List[FileHash]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  flat_type.jsonschema
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class FooModel(BaseModel):
+    foo: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/complex_reference.py 0.34.0-1/tests/data/expected/main/openapi/complex_reference.py
--- 0.26.4-3/tests/data/expected/main/openapi/complex_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/complex_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  complex_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class A(BaseModel):
+    a_property: Optional[B1] = None
+
+
+class B1(A):
+    pass
+
+
+class B2(A):
+    pass
+
+
+class C1(B1):
+    pass
+
+
+class D1(C1):
+    pass
+
+
+class D1andB2(D1, B2):
+    pass
+
+
+A.update_forward_refs()
+B1.update_forward_refs()
+B2.update_forward_refs()
+C1.update_forward_refs()
+D1.update_forward_refs()
+D1andB2.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const.py 0.34.0-1/tests/data/expected/main/openapi/const.py
--- 0.26.4-3/tests/data/expected/main/openapi/const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Namespace(BaseModel):
+    apiVersion: str = Field('v1', const=True)
+    kind: str = Field('Namespace', const=True)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field.py 0.34.0-1/tests/data/expected/main/openapi/const_field.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Api(BaseModel):
+    version: str = Field('v1', const=True, description='The version of this API')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_dataclass.py 0.34.0-1/tests/data/expected/main/openapi/const_field_dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_field_dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Literal
+
+
+@dataclass
+class Api:
+    version: Literal['v1']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_msgspec.py 0.34.0-1/tests/data/expected/main/openapi/const_field_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_field_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal
+
+from msgspec import Meta, Struct
+
+
+class Api(Struct):
+    version: Annotated[Literal['v1'], Meta(description='The version of this API')]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/const_field_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_field_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Api(BaseModel):
+    version: Literal['v1'] = Field(..., description='The version of this API')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_typed_dict.py 0.34.0-1/tests/data/expected/main/openapi/const_field_typed_dict.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_field_typed_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, TypedDict
+
+
+class Api(TypedDict):
+    version: Literal['v1']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/const_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/const_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+
+
+class Namespace(BaseModel):
+    apiVersion: Literal['v1']
+    kind: Literal['Namespace']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/content_in_parameters.py 0.34.0-1/tests/data/expected/main/openapi/content_in_parameters.py
--- 0.26.4-3/tests/data/expected/main/openapi/content_in_parameters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/content_in_parameters.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  content_in_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header.py 0.34.0-1/tests/data/expected/main/openapi/custom_file_header.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/custom_file_header.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,70 @@
+# multiline custom ;
+# header ;
+# file ;
+
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  custom_id.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+from uuid import UUID
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class CustomId(RootModel[UUID]):
+    root: UUID = Field(..., description='My custom ID')
+
+
+class Model(BaseModel):
+    custom_id: Optional[CustomId] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py 0.34.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  custom_id.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+from uuid import UUID
+
+from pydantic import Field, RootModel
+
+from custom_base import Base
+
+
+class CustomId(RootModel[UUID]):
+    root: UUID = Field(..., description='My custom ID')
+
+
+class Model(Base):
+    custom_id: Optional[CustomId] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_template_dir.py 0.34.0-1/tests/data/expected/main/openapi/custom_template_dir.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_template_dir.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/custom_template_dir.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass.py 0.34.0-1/tests/data/expected/main/openapi/dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Optional
+
+
+@dataclass
+class Pet:
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Pets = List[Pet]
+
+
+@dataclass
+class User:
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+@dataclass
+class Error:
+    code: int
+    message: str
+
+
+@dataclass
+class Api:
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[str] = None
+    apiDocumentationUrl: Optional[str] = None
+
+
+Apis = List[Api]
+
+
+@dataclass
+class Event:
+    name: Optional[str] = None
+
+
+@dataclass
+class Result:
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass_base_class.py 0.34.0-1/tests/data/expected/main/openapi/dataclass_base_class.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass_base_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/dataclass_base_class.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,63 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Optional
+
+from custom_base import Base
+
+
+@dataclass
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Pets = List[Pet]
+
+
+@dataclass
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+@dataclass
+class Error(Base):
+    code: int
+    message: str
+
+
+@dataclass
+class Api(Base):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[str] = None
+    apiDocumentationUrl: Optional[str] = None
+
+
+Apis = List[Api]
+
+
+@dataclass
+class Event(Base):
+    name: Optional[str] = None
+
+
+@dataclass
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass_keyword_only.py 0.34.0-1/tests/data/expected/main/openapi/dataclass_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/dataclass_keyword_only.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(kw_only=True)
+class Base:
+    id: str
+    createdAt: Optional[str] = None
+    version: Optional[float] = 1
+
+
+@dataclass(kw_only=True)
+class Child(Base):
+    title: str
+    url: Optional[str] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime.py 0.34.0-1/tests/data/expected/main/openapi/datetime.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/datetime.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: datetime = Field(..., example='2016-08-29T09:12:33.001Z')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_dataclass.py 0.34.0-1/tests/data/expected/main/openapi/datetime_dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/datetime_dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import datetime
+
+
+@dataclass
+class InventoryItem:
+    releaseDate: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_msgspec.py 0.34.0-1/tests/data/expected/main/openapi/datetime_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/datetime_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from msgspec import Struct
+
+
+class InventoryItem(Struct):
+    releaseDate: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AwareDatetime, BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: AwareDatetime = Field(..., examples=['2016-08-29T09:12:33.001Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py 0.34.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: datetime = Field(..., examples=['2016-08-29T09:12:33.001Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/Another.py 0.34.0-1/tests/data/expected/main/openapi/default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/default_object/Another.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+from . import Foo as Foo_1
+from . import Nested
+
+
+class Foo(BaseModel):
+    __root__: str
+
+
+class Bar(BaseModel):
+    original_foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.parse_obj({'text': 'abc', 'number': 123})
+    )
+    nested_foo: Optional[List[Nested.Foo]] = Field(
+        default_factory=lambda: [Nested.Foo.parse_obj(v) for v in ['abc', 'efg']]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/Nested.py 0.34.0-1/tests/data/expected/main/openapi/default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/default_object/Nested.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+from . import Foo as Foo_1
+
+
+class Foo(BaseModel):
+    __root__: str
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.parse_obj({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo_1]] = Field(
+        default_factory=lambda: [
+            Foo_1.parse_obj(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
+    nested_foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.parse_obj('default foo')
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/__init__.py 0.34.0-1/tests/data/expected/main/openapi/default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/default_object/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    text: Optional[str] = '987'
+    number: Optional[float] = None
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.parse_obj({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo]] = Field(
+        default_factory=lambda: [
+            Foo.parse_obj(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/disable_appending_item_suffix.py 0.34.0-1/tests/data/expected/main/openapi/disable_appending_item_suffix.py
--- 0.26.4-3/tests/data/expected/main/openapi/disable_appending_item_suffix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/disable_appending_item_suffix.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class Fax(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[Fax]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/disable_timestamp.py 0.34.0-1/tests/data/expected/main/openapi/disable_timestamp.py
--- 0.26.4-3/tests/data/expected/main/openapi/disable_timestamp.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/disable_timestamp.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,68 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Literal, Union
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+@dataclass
+class RequestBase:
+    version: RequestVersionEnum
+
+
+@dataclass
+class RequestV1(RequestBase):
+    request_id: str
+    version: Literal['v1'] = 'v1'
+
+
+@dataclass
+class RequestV2(RequestBase):
+    version: Literal['v2'] = 'v2'
+
+
+Request = Union[RequestV1, RequestV2]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str = Field(..., description='there is description', title='test title')
+    version: Literal['v1']
+
+
+class RequestV2(RequestBase):
+    version: Literal['v2']
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_duplicate.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum_duplicate.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_duplicate.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum_duplicate.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_duplicate.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Cat(BaseModel):
+    pet_type: Literal['cat'] = Field(..., title='Pet Type')
+    meows: int = Field(..., title='Meows')
+
+
+class Dog(BaseModel):
+    pet_type: Literal['dog'] = Field(..., title='Pet Type')
+    barks: float = Field(..., title='Barks')
+
+
+class PetType(Enum):
+    reptile = 'reptile'
+    lizard = 'lizard'
+
+
+class Lizard(BaseModel):
+    pet_type: Literal['lizard', 'reptile'] = Field(..., title='Pet Type')
+    scales: bool = Field(..., title='Scales')
+
+
+class Animal(BaseModel):
+    pet: Optional[Union[Cat, Dog, Lizard]] = Field(
+        None, discriminator='pet_type', title='Pet'
+    )
+    n: Optional[int] = Field(None, title='N')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str = Field(..., description='there is description', title='test title')
+    version: Literal['v1'] = 'v1'
+
+
+class RequestV2(RequestBase):
+    version: Literal['v2'] = 'v2'
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/general.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class Demo(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/in_array.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/in_array.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_in_array.yaml
+#   timestamp: 2023-07-27T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class MyArray(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
+
+
+class Demo(BaseModel):
+    myArray: List[MyArray]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_in_array.yaml
+#   timestamp: 2023-07-27T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class Demo(BaseModel):
+    myArray: List[Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/with_properties.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/with_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/with_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/with_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,44 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class UserContextVariable(BaseModel):
+    accountId: str = Field(..., description='The account ID of the user.')
+    field_type: str = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class IssueContextVariable(BaseModel):
+    id: Optional[int] = Field(None, description='The issue ID.')
+    key: Optional[str] = Field(None, description='The issue key.')
+    field_type: str = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable1(UserContextVariable):
+    field_type: Literal['user'] = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable2(IssueContextVariable):
+    field_type: Literal['issue'] = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable(
+    RootModel[Union[CustomContextVariable1, CustomContextVariable2]]
+):
+    root: Union[CustomContextVariable1, CustomContextVariable2] = Field(
+        ..., discriminator='field_type'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/without_mapping.py 0.34.0-1/tests/data/expected/main/openapi/discriminator/without_mapping.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/without_mapping.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/discriminator/without_mapping.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_without_mapping.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['ObjectBase'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['CreateObjectRequest'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['UpdateObjectRequest']
+
+
+class Demo(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/duplicate_models2.py 0.34.0-1/tests/data/expected/main/openapi/duplicate_models2.py
--- 0.26.4-3/tests/data/expected/main/openapi/duplicate_models2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/duplicate_models2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models2.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class PetType(Enum):
+    pet = 'pet'
+
+
+class PetDetails(BaseModel):
+    race: Optional[str] = None
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    type: PetType
+    details: Optional[PetDetails] = None
+
+
+class CarType(Enum):
+    car = 'car'
+
+
+class CarDetails(BaseModel):
+    brand: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    type: CarType
+    details: Optional[CarDetails] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability.py 0.34.0-1/tests/data/expected/main/openapi/enable_faux_immutability.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enable_faux_immutability.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_version_header.py 0.34.0-1/tests/data/expected/main/openapi/enable_version_header.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_version_header.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enable_version_header.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,70 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+#   version:   0.0.0
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/all.py 0.34.0-1/tests/data/expected/main/openapi/enum_models/all.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enum_models/all.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Literal['dog', 'cat']] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Animal(BaseModel):
+    kind: Optional[Literal['snake', 'rabbit']] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class EnumObject(BaseModel):
+    type: Optional[Literal['a', 'b']] = None
+
+
+class EnumRoot(BaseModel):
+    __root__: Literal['a', 'b']
+
+
+class IntEnum(BaseModel):
+    __root__: Literal[1, 2]
+
+
+class AliasEnum(BaseModel):
+    __root__: Literal[1, 2, 3]
+
+
+class MultipleTypeEnum(BaseModel):
+    __root__: Literal['red', 'amber', 'green', 42]
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[
+        Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']
+    ] = Field('RC1', description='nullable enum', example='RC2')
+
+
+class Version(BaseModel):
+    __root__: Optional[Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/as_literal.py 0.34.0-1/tests/data/expected/main/openapi/enum_models/as_literal.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/as_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enum_models/as_literal.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Literal['dog', 'cat']] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Animal(BaseModel):
+    kind: Optional[Literal['snake', 'rabbit']] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class EnumObject(BaseModel):
+    type: Optional[Literal['a', 'b']] = None
+
+
+class EnumRoot(BaseModel):
+    __root__: Literal['a', 'b']
+
+
+class IntEnum(BaseModel):
+    __root__: Literal[1, 2]
+
+
+class AliasEnum(BaseModel):
+    __root__: Literal[1, 2, 3]
+
+
+class MultipleTypeEnum(BaseModel):
+    __root__: Literal['red', 'amber', 'green', 42]
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[
+        Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']
+    ] = Field('RC1', description='nullable enum', example='RC2')
+
+
+class Version(BaseModel):
+    __root__: Optional[Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/one.py 0.34.0-1/tests/data/expected/main/openapi/enum_models/one.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/one.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enum_models/one.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,122 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnum(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py 0.34.0-1/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,122 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1] = 1
+    boolean: Literal[True] = True
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnum(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet'] = 'pet'
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  model_b
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  module.openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Input(BaseModel):
+    name: Optional[str] = None
+
+
+class Output(BaseModel):
+    output: Optional[str] = None
+    input: Optional[Input] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  model_b
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  module.openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import model_a
+
+
+class QualityEvaluationRequest(BaseModel):
+    input: Optional[model_a.Output] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config.py 0.34.0-1/tests/data/expected/main/openapi/extra_template_data_config.py
--- 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/extra_template_data_config.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    class Config:
+        arbitrary_types_allowed = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        coerce_numbers_to_str=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/force_optional.py 0.34.0-1/tests/data/expected/main/openapi/force_optional.py
--- 0.26.4-3/tests/data/expected/main/openapi/force_optional.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/force_optional.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Optional[int] = 1
+    name: Optional[str] = None
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: Optional[List[Pet]] = None
+
+
+class User(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: Optional[List[User]] = None
+
+
+class Id(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Rules(BaseModel):
+    __root__: Optional[List[str]] = None
+
+
+class Error(BaseModel):
+    code: Optional[int] = None
+    message: Optional[str] = None
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/general.py 0.34.0-1/tests/data/expected/main/openapi/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/http_refs.py 0.34.0-1/tests/data/expected/main/openapi/http_refs.py
--- 0.26.4-3/tests/data/expected/main/openapi/http_refs.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/http_refs.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  https://example.com/refs.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field, conint
+
+
+class Problem(BaseModel):
+    detail: Optional[str] = Field(
+        None,
+        description='A human readable explanation specific to this occurrence of the\nproblem. You MUST NOT expose internal information, personal\ndata or implementation details through this field.\n',
+        example='Request took too long to complete.',
+    )
+    instance: Optional[AnyUrl] = Field(
+        None,
+        description='An absolute URI that identifies the specific occurrence of the problem.\nIt may or may not yield further information if dereferenced.\n',
+    )
+    status: Optional[conint(ge=100, lt=600)] = Field(
+        None,
+        description='The HTTP status code generated by the origin server for this occurrence\nof the problem.\n',
+        example=503,
+    )
+    title: Optional[str] = Field(
+        None,
+        description='A short, summary of the problem type. Written in english and readable\nfor engineers (usually not suited for non technical stakeholders and\nnot localized); example: Service Unavailable\n',
+    )
+    type: Optional[AnyUrl] = Field(
+        'about:blank',
+        description='An absolute URI that identifies the problem type.  When dereferenced,\nit SHOULD provide human-readable documentation for the problem type\n(e.g., using HTML).\n',
+        example='https://tools.ietf.org/html/rfc7231#section-6.6.4',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/json_pointer.py 0.34.0-1/tests/data/expected/main/openapi/json_pointer.py
--- 0.26.4-3/tests/data/expected/main/openapi/json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/json_pointer.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class TestNestedNested(BaseModel):
+    test_nested_nested_string: Optional[str] = None
+
+
+class TestNested(BaseModel):
+    test_string: Optional[str] = None
+    nested_nested: Optional[TestNestedNested] = None
+
+
+class Test(TestNested):
+    pass
+
+
+class Foo(Test):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/max_items_enum.py 0.34.0-1/tests/data/expected/main/openapi/max_items_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/max_items_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/max_items_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  max_items_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class BarEnum(Enum):
+    hello = 'hello'
+    goodbye = 'goodbye'
+
+
+class Foo(BaseModel):
+    bar: Optional[List[BarEnum]] = Field(None, max_items=3)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/max_min_number.py 0.34.0-1/tests/data/expected/main/openapi/max_min_number.py
--- 0.26.4-3/tests/data/expected/main/openapi/max_min_number.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/max_min_number.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  max_min_number.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, confloat
+
+
+class Product(BaseModel):
+    price: Optional[confloat(ge=-999999.999999, le=999999.999999)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/collections.py 0.34.0-1/tests/data/expected/main/openapi/modular/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/models.py 0.34.0-1/tests/data/expected/main/openapi/modular/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea]
+
+
+Tea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class CustomOptional(BaseModel):
+    __root__: str
+
+
+class CustomId(BaseModel):
+    __root__: str
+
+
+class CustomError(BaseModel):
+    code: int
+    message: str
+
+
+class CustomResult(BaseModel):
+    event: Optional[models.CustomEvent] = None
+
+
+class CustomSource(BaseModel):
+    country: Optional[str] = None
+
+
+class CustomDifferentTea(BaseModel):
+    foo: Optional[foo_1.CustomTea] = None
+    nested: Optional[foo_2.CustomTea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class CustomField(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/collections.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class CustomPets(BaseModel):
+    __root__: List[models.CustomPet]
+
+
+class CustomUsers(BaseModel):
+    __root__: List[models.CustomUser]
+
+
+class CustomRules(BaseModel):
+    __root__: List[str]
+
+
+class CustomStage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class CustomApi(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[CustomStage] = None
+
+
+class CustomApis(BaseModel):
+    __root__: List[CustomApi]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import CustomId
+
+
+class CustomTea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+
+
+class CustomCocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class CustomThing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class CustomThang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class CustomOthers(BaseModel):
+    name: Optional[str] = None
+
+
+class CustomClone(CustomThing):
+    others: Optional[CustomOthers] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/models.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class CustomSpecies(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class CustomPet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[CustomSpecies] = None
+
+
+class CustomUser(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class CustomEvent(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from .. import CustomId, CustomOptional
+
+
+class CustomTea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+    self: Optional[CustomTea] = None
+    optional: Optional[List[CustomOptional]] = None
+
+
+class CustomTeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+    self: Optional[CustomTea] = None
+    optional: Optional[List[CustomOptional]] = None
+
+
+class CustomList(BaseModel):
+    __root__: List[CustomTea]
+
+
+CustomTea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import CustomSource, bar, foo
+
+
+class CustomChocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[CustomSource] = None
+    cocoa: Optional[foo.CustomCocoa] = None
+    field: Optional[bar.CustomField] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(OptionalModel):
+    pass
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/collections.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/models.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(Tea):
+    pass
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea]
+
+
+Tea.update_forward_refs()
+TeaClone.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+Optional = str
+
+
+Id = str
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Result(TypedDict):
+    event: NotRequired[models.Event]
+
+
+class Source(TypedDict):
+    country: NotRequired[str]
+
+
+class DifferentTea(TypedDict):
+    foo: NotRequired[foo_1.Tea]
+    nested: NotRequired[foo_2.Tea]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+Field = str
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/collections.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, NotRequired, TypedDict
+
+from . import models
+
+Pets = List[models.Pet]
+
+
+Users = List[models.User]
+
+
+Rules = List[str]
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+    stage: NotRequired[Literal['test', 'dev', 'stg', 'prod']]
+
+
+Apis = List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+from .. import Id
+
+
+class Tea(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+
+
+class Cocoa(TypedDict):
+    quality: NotRequired[int]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, NotRequired, TypedDict
+
+
+class Thing(TypedDict):
+    attributes: NotRequired[Dict[str, Any]]
+
+
+class Thang(TypedDict):
+    attributes: NotRequired[List[Dict[str, Any]]]
+
+
+class Others(TypedDict):
+    name: NotRequired[str]
+
+
+class Clone(Thing):
+    others: NotRequired[Others]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/models.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Literal, NotRequired, TypedDict, Union
+
+Species = Literal['dog', 'cat', 'snake']
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+    species: NotRequired[Species]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+class Event(TypedDict):
+    name: NotRequired[Union[str, float, int, bool, Dict[str, Any], List[str]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, TypedDict
+
+from .. import Id, Optional
+
+
+class Tea(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+    self: NotRequired[Tea]
+    optional: NotRequired[List[Optional]]
+
+
+class TeaClone(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+    self: NotRequired[Tea]
+    optional: NotRequired[List[Optional]]
+
+
+ListModel = List[Tea]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+from .. import Source, bar, foo
+
+
+class Chocolate(TypedDict):
+    flavour: NotRequired[str]
+    source: NotRequired[Source]
+    cocoa: NotRequired[foo.Cocoa]
+    field: NotRequired[bar.Field]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Another.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/Another.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from msgspec import Struct, convert, field
+
+from . import Foo as Foo_1
+from . import Nested
+
+Foo = str
+
+
+class Bar(Struct):
+    original_foo: Optional[Foo_1] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo_1)
+    )
+    nested_foo: Optional[List[Nested.Foo]] = field(
+        default_factory=lambda: convert(['abc', 'efg'], type=list[Nested.Foo])
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Nested.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/Nested.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from msgspec import Struct, convert, field
+
+from . import Foo as Foo_1
+
+Foo = str
+
+
+class Bar(Struct):
+    foo: Optional[Foo_1] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo_1)
+    )
+    baz: Optional[List[Foo_1]] = field(
+        default_factory=lambda: convert(
+            [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}],
+            type=list[Foo_1],
+        )
+    )
+    nested_foo: Optional[Foo] = 'default foo'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/__init__.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_default_object/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from msgspec import Struct, convert, field
+
+
+class Foo(Struct):
+    text: Optional[str] = '987'
+    number: Optional[float] = None
+
+
+class Bar(Struct):
+    foo: Optional[Foo] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo)
+    )
+    baz: Optional[List[Foo]] = field(
+        default_factory=lambda: convert(
+            [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}],
+            type=list[Foo],
+        )
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_keyword_only.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from msgspec import Struct
+
+
+class Base(Struct, kw_only=True):
+    id: str
+    createdAt: Optional[str] = None
+    version: Optional[float] = 1
+
+
+class Child(Base, kw_only=True):
+    title: str
+    url: Optional[str] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from msgspec import Struct
+
+
+class Base(Struct, omit_defaults=True, kw_only=True):
+    id: str
+    createdAt: Optional[str] = None
+    version: Optional[float] = 1
+
+
+class Child(Base, omit_defaults=True, kw_only=True):
+    title: str
+    url: Optional[str] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_struct.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_struct.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,64 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional
+
+from msgspec import Meta, Struct
+
+
+class Pet(Struct):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Pets = List[Pet]
+
+
+class User(Struct):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Optional[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')]
+    ] = None
+    apiVersionNumber: Optional[
+        Annotated[str, Meta(description='To be used as a version parameter value')]
+    ] = None
+    apiUrl: Optional[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")]
+    ] = None
+    apiDocumentationUrl: Optional[
+        Annotated[str, Meta(description='A URL to the API console for each API')]
+    ] = None
+
+
+Apis = List[Api]
+
+
+class Event(Struct):
+    name: Optional[str] = None
+
+
+class Result(Struct):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct_snake_case.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_struct_snake_case.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct_snake_case.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_struct_snake_case.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,65 @@
+# generated by datamodel-codegen:
+#   filename:  api_ordered_required_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional
+
+from msgspec import Meta, Struct, field
+
+
+class Pet(Struct):
+    id: int
+    name: str
+    before_tag: str = field(name='beforeTag')
+    tag: Optional[str] = None
+
+
+Pets = List[Pet]
+
+
+class User(Struct):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    api_key: Optional[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')]
+    ] = field(name='apiKey', default=None)
+    api_version_number: Optional[
+        Annotated[str, Meta(description='To be used as a version parameter value')]
+    ] = field(name='apiVersionNumber', default=None)
+    api_url: Optional[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")]
+    ] = field(name='apiUrl', default=None)
+    api_documentation_url: Optional[
+        Annotated[str, Meta(description='A URL to the API console for each API')]
+    ] = field(name='apiDocumentationUrl', default=None)
+
+
+Apis = List[Api]
+
+
+class Event(Struct):
+    name: Optional[str] = None
+
+
+class Result(Struct):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from msgspec import Meta, Struct
+
+
+class Pet(Struct):
+    id: Annotated[int, Meta(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Meta(max_length=256)]
+    tag: Optional[Annotated[str, Meta(max_length=64)]] = None
+
+
+Pets = List[Pet]
+
+
+UID = Annotated[int, Meta(ge=0)]
+
+
+Phone = Annotated[str, Meta(min_length=3)]
+
+
+FaxItem = Annotated[str, Meta(min_length=3)]
+
+
+class User(Struct):
+    id: Annotated[int, Meta(ge=0)]
+    name: Annotated[str, Meta(max_length=256)]
+    uid: UID
+    tag: Optional[Annotated[str, Meta(max_length=64)]] = None
+    phones: Optional[List[Phone]] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Annotated[Union[int, float], Meta(ge=1.0, le=300.0)]] = None
+    weight: Optional[Annotated[Union[float, int], Meta(ge=1.0, le=1000.0)]] = None
+    age: Optional[Annotated[int, Meta(gt=0, le=200)]] = None
+    rating: Optional[Annotated[float, Meta(gt=0.0, le=5.0)]] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Optional[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')]
+    ] = None
+    apiVersionNumber: Optional[
+        Annotated[str, Meta(description='To be used as a version parameter value')]
+    ] = None
+    apiUrl: Optional[
+        Annotated[
+            str,
+            Meta(description="The URL describing the dataset's fields", min_length=1),
+        ]
+    ] = None
+    apiDocumentationUrl: Optional[
+        Annotated[str, Meta(description='A URL to the API console for each API')]
+    ] = None
+
+
+Apis = List[Api]
+
+
+class Event(Struct):
+    name: Optional[str] = None
+
+
+class Result(Struct):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/multiple_required_any_of.py 0.34.0-1/tests/data/expected/main/openapi/multiple_required_any_of.py
--- 0.26.4-3/tests/data/expected/main/openapi/multiple_required_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/multiple_required_any_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_required_any_of.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from ipaddress import IPv4Address, IPv6Address
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class Addr1(BaseModel):
+    ipv4Addr: IPv4Address
+    ipv6Addr: Optional[IPv6Address] = None
+
+
+class Addr2(BaseModel):
+    ipv4Addr: Optional[IPv4Address] = None
+    ipv6Addr: IPv6Address
+
+
+class Addr(BaseModel):
+    __root__: Union[Addr1, Addr2]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_enum.py 0.34.0-1/tests/data/expected/main/openapi/nested_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/nested_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  nested_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel
+
+
+class State(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class NestedState1(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class NestedState2(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class Result1(BaseModel):
+    state: NestedState1
+
+
+class Result2(BaseModel):
+    state: NestedState2
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/no_file.py 0.34.0-1/tests/data/expected/main/openapi/no_file.py
--- 0.26.4-3/tests/data/expected/main/openapi/no_file.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/no_file.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable.py 0.34.0-1/tests/data/expected/main/openapi/nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/nullable.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str
+    next: Optional[str] = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: Optional[str] = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_31.py 0.34.0-1/tests/data/expected/main/openapi/nullable_31.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_31.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/nullable_31.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_31.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel
+
+
+class Apple(BaseModel):
+    pass
+
+
+class Basket(BaseModel):
+    apples: List[Apple] | None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable.py 0.34.0-1/tests/data/expected/main/openapi/nullable_strict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/nullable_strict_nullable.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: Optional[str] = Field(...)
+    next: str = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = Field(...)
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: str = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[Optional[str]]
+    oneOfComments: List[Union[Optional[str], Optional[float]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py 0.34.0-1/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str | None = Field(...)
+    next: str = 'last'
+    index: float
+    tag: str | None = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: str | None = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: str | None = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: AnyUrl | None = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: AnyUrl | None = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api] | None = Field(...)
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: str = 'empty'
+    tag: str | None = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: str | None = 'example'
+
+
+class Name(BaseModel):
+    __root__: str | None = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str | None]
+    oneOfComments: List[str | float | None]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/oas_response_reference.py 0.34.0-1/tests/data/expected/main/openapi/oas_response_reference.py
--- 0.26.4-3/tests/data/expected/main/openapi/oas_response_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/oas_response_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  oas_response_reference.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/override_required_all_of.py 0.34.0-1/tests/data/expected/main/openapi/override_required_all_of.py
--- 0.26.4-3/tests/data/expected/main/openapi/override_required_all_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/override_required_all_of.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  override_required_all_of.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Optional[Type] = Field(None, description='Object type')
+    rank: Optional[Union[int, float]] = Field(None, description='User rank')
+    allIn: Optional[Union[Type, str, Union[int, float]]] = None
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Type = Field(..., description='Object type')
+    rank: Union[int, float] = Field(..., description='User rank')
+    allIn: Union[Type, str, Union[int, float]]
+
+
+class UpdateObjectRequest(ObjectBase):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/general.py 0.34.0-1/tests/data/expected/main/openapi/pattern/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pattern/general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            regex=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'
+        )
+    ] = None
+    arn: Optional[
+        constr(regex=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(regex=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/msgspec_pattern.py 0.34.0-1/tests/data/expected/main/openapi/pattern/msgspec_pattern.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/msgspec_pattern.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pattern/msgspec_pattern.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Optional
+
+from msgspec import Meta, Struct
+
+
+class Info(Struct):
+    hostName: Optional[str] = None
+    arn: Optional[
+        Annotated[
+            str,
+            Meta(pattern='(^arn:([^:]*):([^:]*):([^:]*):(|\\*|[\\d]{12}):(.+)$)|^\\*$'),
+        ]
+    ] = None
+    tel: Optional[
+        Annotated[str, Meta(pattern='^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$')]
+    ] = None
+    comment: Optional[
+        Annotated[str, Meta(pattern='[^\\b\\f\\n\\r\\t\\\\a+.?\'"|()]+$')]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/pattern/pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pattern/pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            pattern=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'
+        )
+    ] = None
+    arn: Optional[
+        constr(pattern=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(pattern=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(pattern=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_lookaround.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, constr
+
+
+class Info(BaseModel):
+    model_config = ConfigDict(
+        regex_engine="python-re",
+    )
+    name: Optional[constr(pattern=r'.*foo.*(?<!baz)bar.*')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_lookaround.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class Info(BaseModel):
+    model_config = ConfigDict(
+        regex_engine="python-re",
+    )
+    name: Optional[str] = Field(None, pattern='.*foo.*(?<!baz)bar.*')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+from . import Foo as Foo_1
+from . import Nested
+
+
+class Foo(RootModel[str]):
+    root: str
+
+
+class Bar(BaseModel):
+    original_foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.model_validate({'text': 'abc', 'number': 123})
+    )
+    nested_foo: Optional[List[Nested.Foo]] = Field(
+        default_factory=lambda: [Nested.Foo.model_validate(v) for v in ['abc', 'efg']]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+from . import Foo as Foo_1
+
+
+class Foo(RootModel[str]):
+    root: str
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.model_validate({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo_1]] = Field(
+        default_factory=lambda: [
+            Foo_1.model_validate(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
+    nested_foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.model_validate('default foo')
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    text: Optional[str] = '987'
+    number: Optional[float] = None
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.model_validate({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo]] = Field(
+        default_factory=lambda: [
+            Foo.model_validate(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pyproject.py 0.34.0-1/tests/data/expected/main/openapi/pyproject.py
--- 0.26.4-3/tests/data/expected/main/openapi/pyproject.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pyproject.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,94 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import (
+    annotations,
+)
+
+from typing import (
+    List,
+    Optional,
+)
+
+from pydantic import (
+    AnyUrl,
+    BaseModel,
+    Field,
+)
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    api_key: Optional[
+        str
+    ] = Field(
+        None,
+        alias="apiKey",
+        description="To be used as a dataset parameter value",
+    )
+    api_version_number: Optional[
+        str
+    ] = Field(
+        None,
+        alias="apiVersionNumber",
+        description="To be used as a version parameter value",
+    )
+    api_url: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        alias="apiUrl",
+        description="The URL describing the dataset's fields",
+    )
+    api_documentation_url: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        alias="apiDocumentationUrl",
+        description="A URL to the API console for each API",
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str]
+
+
+class Result(BaseModel):
+    event: Optional[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pyproject_not_found.py 0.34.0-1/tests/data/expected/main/openapi/pyproject_not_found.py
--- 0.26.4-3/tests/data/expected/main/openapi/pyproject_not_found.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/pyproject_not_found.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py 0.34.0-1/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  entities.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class CatDetails(BaseModel):
+    name: str = Field(..., description='Name of this cat')
+    birthYear: float = Field(..., description="Year of this cat's birth")
+
+
+class CatInfo(BaseModel):
+    cat_id: str = Field(..., description='ID of this cat')
+    details: Optional[CatDetails] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties.py 0.34.0-1/tests/data/expected/main/openapi/reference_to_object_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/reference_to_object_properties.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  reference_to_object_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Parent(BaseModel):
+    id: Optional[Id] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
+
+
+class Child(BaseModel):
+    id: Optional[Id] = None
+    parent_id: Optional[Id] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py 0.34.0-1/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  reference_to_object_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Parent(BaseModel):
+    id: Optional[str] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
+
+
+class Child(BaseModel):
+    id: Optional[str] = None
+    parent_id: Optional[str] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/referenced_default.py 0.34.0-1/tests/data/expected/main/openapi/referenced_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/referenced_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/referenced_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  referenced_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field, RootModel, confloat
+
+
+class ModelSettingB(RootModel[confloat(ge=0.0, le=10.0)]):
+    root: confloat(ge=0.0, le=10.0)
+
+
+class Model(BaseModel):
+    settingA: Optional[confloat(ge=0.0, le=10.0)] = 5
+    settingB: Optional[ModelSettingB] = Field(
+        default_factory=lambda: ModelSettingB.model_validate(5)
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/same_name_objects.py 0.34.0-1/tests/data/expected/main/openapi/same_name_objects.py
--- 0.26.4-3/tests/data/expected/main/openapi/same_name_objects.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/same_name_objects.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  same_name_objects.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Pets(BaseModel):
+    pass
+
+    class Config:
+        extra = Extra.forbid
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Resolved(BaseModel):
+    resolved: Optional[List[str]] = None
+
+
+class PetsModel(BaseModel):
+    __root__: List[Pet]
+
+
+class Friends2(BaseModel):
+    __root__: PetsModel
+
+
+class Friends1(BaseModel):
+    __root__: PetsModel
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/shadowed_imports.py 0.34.0-1/tests/data/expected/main/openapi/shadowed_imports.py
--- 0.26.4-3/tests/data/expected/main/openapi/shadowed_imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/shadowed_imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  shadowed_imports.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date as date_aliased
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class MarketingOptIn(BaseModel):
+    optedIn: Optional[bool] = Field(None, examples=[False])
+    date: Optional[date_aliased] = Field(None, examples=['2018-04-26T17:03:25.155Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/special_yaml_keywords.py 0.34.0-1/tests/data/expected/main/openapi/special_yaml_keywords.py
--- 0.26.4-3/tests/data/expected/main/openapi/special_yaml_keywords.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/special_yaml_keywords.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  special_yaml_keywords.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class None1(BaseModel):
+    pass
+
+
+class False1(BaseModel):
+    pass
+
+
+class True1(BaseModel):
+    pass
+
+
+class On(BaseModel):
+    pass
+
+
+class NestedKeywords(BaseModel):
+    None_: None1 = Field(..., alias='None')
+    false: False1
+    True_: True1 = Field(..., alias='True')
+    on: On
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/stdin.py 0.34.0-1/tests/data/expected/main/openapi/stdin.py
--- 0.26.4-3/tests/data/expected/main/openapi/stdin.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/stdin.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/subclass_enum.py 0.34.0-1/tests/data/expected/main/openapi/subclass_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/subclass_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/subclass_enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class ProcessingTask(BaseModel):
+    processing_status: Optional[ProcessingStatus] = Field(
+        'COMPLETED', title='Status of the task'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/target_python_version.py 0.34.0-1/tests/data/expected/main/openapi/target_python_version.py
--- 0.26.4-3/tests/data/expected/main/openapi/target_python_version.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/target_python_version.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict.py 0.34.0-1/tests/data/expected/main/openapi/typed_dict.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/typed_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, TypedDict
+
+from typing_extensions import NotRequired
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Pets = List[Pet]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis = List[Api]
+
+
+class Event(TypedDict):
+    name: NotRequired[str]
+
+
+class Result(TypedDict):
+    event: NotRequired[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable.py 0.34.0-1/tests/data/expected/main/openapi/typed_dict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/typed_dict_nullable.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, Optional, TypedDict, Union
+
+
+class Cursors(TypedDict):
+    prev: str
+    next: NotRequired[str]
+    index: float
+    tag: NotRequired[str]
+
+
+class TopLevel(TypedDict):
+    cursors: Cursors
+
+
+class Info(TypedDict):
+    name: str
+
+
+class User(TypedDict):
+    info: Info
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis = Optional[List[Api]]
+
+
+class EmailItem(TypedDict):
+    author: str
+    address: str
+    description: NotRequired[str]
+    tag: NotRequired[str]
+
+
+Email = List[EmailItem]
+
+
+Id = int
+
+
+Description = Optional[str]
+
+
+Name = Optional[str]
+
+
+Tag = str
+
+
+class Notes(TypedDict):
+    comments: NotRequired[List[str]]
+
+
+class Options(TypedDict):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py 0.34.0-1/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, Optional, TypedDict, Union
+
+
+class Cursors(TypedDict):
+    prev: Optional[str]
+    next: NotRequired[str]
+    index: float
+    tag: NotRequired[str]
+
+
+class TopLevel(TypedDict):
+    cursors: Cursors
+
+
+class Info(TypedDict):
+    name: str
+
+
+class User(TypedDict):
+    info: Info
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[Optional[str]]
+    apiDocumentationUrl: NotRequired[Optional[str]]
+
+
+Apis = Optional[List[Api]]
+
+
+class EmailItem(TypedDict):
+    author: str
+    address: str
+    description: NotRequired[str]
+    tag: NotRequired[str]
+
+
+Email = List[EmailItem]
+
+
+Id = int
+
+
+Description = Optional[str]
+
+
+Name = Optional[str]
+
+
+Tag = str
+
+
+class Notes(TypedDict):
+    comments: NotRequired[List[str]]
+
+
+class Options(TypedDict):
+    comments: List[Optional[str]]
+    oneOfComments: List[Union[Optional[str], Optional[float]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_py.py 0.34.0-1/tests/data/expected/main/openapi/typed_dict_py.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_py.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/typed_dict_py.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, TypedDict
+
+from typing_extensions import NotRequired
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Pets = List[Pet]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis = List[Api]
+
+
+class Event(TypedDict):
+    name: NotRequired[str]
+
+
+class Result(TypedDict):
+    event: NotRequired[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unsorted_optional_fields.py 0.34.0-1/tests/data/expected/main/openapi/unsorted_optional_fields.py
--- 0.26.4-3/tests/data/expected/main/openapi/unsorted_optional_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/unsorted_optional_fields.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  unsorted_optional_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class Note:
+    text: str
+    author: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: Annotated[List[Pet], Field(max_items=10, min_items=1, unique_items=True)]
+
+
+class UID(BaseModel):
+    __root__: Annotated[int, Field(ge=0)]
+
+
+class Phone(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_items=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: Annotated[List[Pet], Field(max_items=10, min_items=1, unique_items=True)]
+
+
+class UID(BaseModel):
+    __root__: Annotated[int, Field(ge=0)]
+
+
+class Phone(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_items=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: Annotated[List[Pet], Field(max_length=10, min_length=1)]
+
+
+class UID(RootModel[int]):
+    root: Annotated[int, Field(ge=0)]
+
+
+class Phone(RootModel[str]):
+    root: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(RootModel[str]):
+    root: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_length=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_default.py 0.34.0-1/tests/data/expected/main/openapi/use_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_default.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Optional[int] = 1
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_default_kwarg.py 0.34.0-1/tests/data/expected/main/openapi/use_default_kwarg.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_default_kwarg.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_default_kwarg.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str
+    next: Optional[str] = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        default=None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        default=None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        default=None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        default=None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: Optional[str] = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/collections.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Sequence
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: Sequence[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: Sequence[models.User]
+
+
+class Rules(BaseModel):
+    __root__: Sequence[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: Sequence[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Mapping, Optional, Sequence
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Mapping[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[Sequence[Mapping[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/models.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Mapping, Optional, Sequence, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[
+        Union[str, float, int, bool, Mapping[str, Any], Sequence[str]]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Sequence
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: Sequence[Tea]
+
+
+Tea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from enum import Enum
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: Sequence[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: Sequence[models.User]
+
+
+class Rules(BaseModel):
+    __root__: Sequence[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: Sequence[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Mapping[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[Sequence[Mapping[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[
+        Union[str, float, int, bool, Mapping[str, Any], Sequence[str]]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: Sequence[Tea]
+
+
+Tea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_operation_id_as_name.py 0.34.0-1/tests/data/expected/main/openapi/use_operation_id_as_name.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_operation_id_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_operation_id_as_name.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class ListPetsParametersQuery(BaseModel):
+    limit: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/collections.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: list[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: list[models.User]
+
+
+class Rules(BaseModel):
+    __root__: list[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: list[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[list[dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/models.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, dict[str, Any], list[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[list[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[list[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: list[Tea]
+
+
+Tea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/validation.py 0.34.0-1/tests/data/expected/main/openapi/validation.py
--- 0.26.4-3/tests/data/expected/main/openapi/validation.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/validation.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_aliases.py 0.34.0-1/tests/data/expected/main/openapi/with_aliases.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_aliases.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id_: int = Field(..., alias='id')
+    name_: str = Field(..., alias='name')
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id_: int = Field(..., alias='id')
+    name_: str = Field(..., alias='name')
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name_: Optional[str] = Field(None, alias='name')
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_aliases_msgspec.py 0.34.0-1/tests/data/expected/main/openapi/with_aliases_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_aliases_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_aliases_msgspec.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,64 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional
+
+from msgspec import Meta, Struct, field
+
+
+class Pet(Struct):
+    id_: int = field(name='id')
+    name_: str = field(name='name')
+    tag: Optional[str] = None
+
+
+Pets = List[Pet]
+
+
+class User(Struct):
+    id_: int = field(name='id')
+    name_: str = field(name='name')
+    tag: Optional[str] = None
+
+
+Users = List[User]
+
+
+Id = str
+
+
+Rules = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Optional[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')]
+    ] = None
+    apiVersionNumber: Optional[
+        Annotated[str, Meta(description='To be used as a version parameter value')]
+    ] = None
+    apiUrl: Optional[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")]
+    ] = None
+    apiDocumentationUrl: Optional[
+        Annotated[str, Meta(description='A URL to the API console for each API')]
+    ] = None
+
+
+Apis = List[Api]
+
+
+class Event(Struct):
+    name_: Optional[str] = field(name='name', default=None)
+
+
+class Result(Struct):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_exclusive.py 0.34.0-1/tests/data/expected/main/openapi/with_exclusive.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_exclusive.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_exclusive.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  exclusive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, conint
+
+
+class MaximumProblem(BaseModel):
+    status: Optional[conint(ge=100, lt=600)] = None
+
+
+class MinimumProblem(BaseModel):
+    status: Optional[conint(le=600, gt=100)] = None
+
+
+class MinimumMaximumProblem(BaseModel):
+    status: Optional[conint(lt=600, gt=100)] = None
+
+
+class Problem(BaseModel):
+    status: Optional[conint(ge=100, le=600)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class FaxItem(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_length=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Sequence, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[Sequence[Pet]]):
+    root: Sequence[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[Sequence[Phone]] = Field(None, max_length=10)
+    fax: Optional[Sequence[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[Sequence[User]]):
+    root: Sequence[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[Sequence[str]]):
+    root: Sequence[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[Sequence[Api]]):
+    root: Sequence[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import FrozenSet, Optional, Sequence, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[FrozenSet[Pet]]):
+    root: FrozenSet[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[Sequence[Phone]] = Field(None, max_length=10)
+    fax: Optional[Sequence[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[Sequence[User]]):
+    root: Sequence[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[Sequence[str]]):
+    root: Sequence[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[Sequence[Api]]):
+    root: Sequence[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[list[Pet]]):
+    root: list[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[list[Phone]] = Field(None, max_length=10)
+    fax: Optional[list[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[list[User]]):
+    root: list[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[list[str]]):
+    root: list[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[list[Api]]):
+    root: list[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[set[Pet]]):
+    root: set[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[list[Phone]] = Field(None, max_length=10)
+    fax: Optional[list[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[list[User]]):
+    root: list[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[list[str]]):
+    root: list[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[list[Api]]):
+    root: list[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Set, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: Set[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class FaxItem(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_snake_case_field.py 0.34.0-1/tests/data/expected/main/openapi/with_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_snake_case_field.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    api_key: Optional[str] = Field(
+        None, alias='apiKey', description='To be used as a dataset parameter value'
+    )
+    api_version_number: Optional[str] = Field(
+        None,
+        alias='apiVersionNumber',
+        description='To be used as a version parameter value',
+    )
+    api_url: Optional[AnyUrl] = Field(
+        None, alias='apiUrl', description="The URL describing the dataset's fields"
+    )
+    api_documentation_url: Optional[AnyUrl] = Field(
+        None,
+        alias='apiDocumentationUrl',
+        description='A URL to the API console for each API',
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_strip_default_none.py 0.34.0-1/tests/data/expected/main/openapi/with_strip_default_none.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_strip_default_none.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/with_strip_default_none.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str]
+
+
+class Result(BaseModel):
+    event: Optional[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints.py 0.34.0-1/tests/data/expected/main/openapi/without_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/without_field_constraints.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,84 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, confloat, conint, constr
+
+
+class Pet(BaseModel):
+    id: conint(ge=0, le=9223372036854775807)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: conint(ge=0)
+
+
+class Phone(BaseModel):
+    __root__: constr(min_length=3)
+
+
+class User(BaseModel):
+    id: conint(ge=0)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[constr(min_length=3)]] = None
+    height: Optional[Union[conint(ge=1, le=300), confloat(ge=1.0, le=300.0)]] = None
+    weight: Optional[Union[confloat(ge=1.0, le=1000.0), conint(ge=1, le=1000)]] = None
+    age: Optional[conint(le=200, gt=0)] = None
+    rating: Optional[confloat(le=5.0, gt=0.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py 0.34.0-1/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,84 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel, confloat, conint, constr
+
+
+class Pet(BaseModel):
+    id: conint(ge=0, le=9223372036854775807)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[conint(ge=0)]):
+    root: conint(ge=0)
+
+
+class Phone(RootModel[constr(min_length=3)]):
+    root: constr(min_length=3)
+
+
+class User(BaseModel):
+    id: conint(ge=0)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_length=10)
+    fax: Optional[List[constr(min_length=3)]] = None
+    height: Optional[Union[conint(ge=1, le=300), confloat(ge=1.0, le=300.0)]] = None
+    weight: Optional[Union[confloat(ge=1.0, le=1000.0), conint(ge=1, le=1000)]] = None
+    age: Optional[conint(le=200, gt=0)] = None
+    rating: Optional[confloat(le=5.0, gt=0.0)] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/space_and_special_characters_dict.py 0.34.0-1/tests/data/expected/main/space_and_special_characters_dict.py
--- 0.26.4-3/tests/data/expected/main/space_and_special_characters_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/space_and_special_characters_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters_dict.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class InitialParameters(BaseModel):
+    V1: int
+    V2: int
+
+
+class Data(BaseModel):
+    Length__m_: float = Field(..., alias='Length (m)')
+    Symmetric_deviation____: float = Field(..., alias='Symmetric deviation (%)')
+    Total_running_time__s_: int = Field(..., alias='Total running time (s)')
+    Mass__kg_: float = Field(..., alias='Mass (kg)')
+    Initial_parameters: InitialParameters = Field(..., alias='Initial parameters')
+    class_: str = Field(..., alias='class')
+
+
+class Values(BaseModel):
+    field_1_Step: str = Field(..., alias='1 Step')
+    field_2_Step: str = Field(..., alias='2 Step')
+
+
+class Recursive1(BaseModel):
+    value: float
+
+
+class Sub(BaseModel):
+    recursive: Recursive1
+
+
+class Recursive(BaseModel):
+    sub: Sub
+
+
+class Model(BaseModel):
+    Serial_Number: str = Field(..., alias='Serial Number')
+    Timestamp: str
+    Data: Data
+    values: Values
+    recursive: Recursive
diff -pruN 0.26.4-3/tests/data/expected/main/yaml.py 0.34.0-1/tests/data/expected/main/yaml.py
--- 0.26.4-3/tests/data/expected/main/yaml.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main/yaml.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main/output.py 0.34.0-1/tests/data/expected/main_kr/main/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_base_class/output.py 0.34.0-1/tests/data/expected/main_kr/main_base_class/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_base_class/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_base_class/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,71 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, Field
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_custom_template_dir/output.py 0.34.0-1/tests/data/expected/main_kr/main_custom_template_dir/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_custom_template_dir/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_custom_template_dir/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/__init__.py 0.34.0-1/tests/data/expected/main_kr/main_modular/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/bar.py 0.34.0-1/tests/data/expected/main_kr/main_modular/bar.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/collections.py 0.34.0-1/tests/data/expected/main_kr/main_modular/collections.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/__init__.py 0.34.0-1/tests/data/expected/main_kr/main_modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/bar.py 0.34.0-1/tests/data/expected/main_kr/main_modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/models.py 0.34.0-1/tests/data/expected/main_kr/main_modular/models.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/__init__.py 0.34.0-1/tests/data/expected/main_kr/main_modular/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/nested/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/foo.py 0.34.0-1/tests/data/expected/main_kr/main_modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea]
+
+
+Tea.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/__init__.py 0.34.0-1/tests/data/expected/main_kr/main_modular/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/woo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/boo.py 0.34.0-1/tests/data/expected/main_kr/main_modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_modular/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_no_file/output.py 0.34.0-1/tests/data/expected/main_kr/main_no_file/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_no_file/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_no_file/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_use_field_description/output.py 0.34.0-1/tests/data/expected/main_kr/main_use_field_description/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_use_field_description/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_use_field_description/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,74 @@
+# generated by datamodel-codegen:
+#   filename:  api_multiline_docstrings.yaml
+#   timestamp: 2022-11-11T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    """
+    To be used as a dataset parameter value.
+    Now also with multi-line docstrings.
+    """
+    apiVersionNumber: Optional[str] = None
+    """
+    To be used as a version parameter value
+    """
+    apiUrl: Optional[AnyUrl] = None
+    """
+    The URL describing the dataset's fields
+    """
+    apiDocumentationUrl: Optional[AnyUrl] = None
+    """
+    A URL to the API console for each API
+    """
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_use_schema_description/output.py 0.34.0-1/tests/data/expected/main_kr/main_use_schema_description/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_use_schema_description/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/main_use_schema_description/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,79 @@
+# generated by datamodel-codegen:
+#   filename:  api_multiline_docstrings.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    """
+    error result.
+    Now with multi-line docstrings.
+    """
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None,
+        description='To be used as a dataset parameter value.\nNow also with multi-line docstrings.',
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    """
+    Event object
+    """
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject/output.py 0.34.0-1/tests/data/expected/main_kr/pyproject/output.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/pyproject/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,92 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import (
+    annotations,
+)
+
+from typing import (
+    List,
+    Optional,
+)
+
+from pydantic import (
+    AnyUrl,
+    BaseModel,
+    Field,
+)
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[
+        str
+    ] = Field(
+        None,
+        description="To be used as a dataset parameter value",
+    )
+    apiVersionNumber: Optional[
+        str
+    ] = Field(
+        None,
+        description="To be used as a version parameter value",
+    )
+    apiUrl: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        description="The URL describing the dataset's fields",
+    )
+    apiDocumentationUrl: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        description="A URL to the API console for each API",
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[
+        Event
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject/output.strictstr.py 0.34.0-1/tests/data/expected/main_kr/pyproject/output.strictstr.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject/output.strictstr.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/pyproject/output.strictstr.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field, StrictStr
+
+
+class Pet(BaseModel):
+    id: int
+    name: StrictStr
+    tag: Optional[StrictStr] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: StrictStr
+    tag: Optional[StrictStr] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: StrictStr
+
+
+class Rules(BaseModel):
+    __root__: List[StrictStr]
+
+
+class Error(BaseModel):
+    code: int
+    message: StrictStr
+
+
+class Api(BaseModel):
+    apiKey: Optional[StrictStr] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[StrictStr] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[StrictStr] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/target_python_version/output.py 0.34.0-1/tests/data/expected/main_kr/target_python_version/output.py
--- 0.26.4-3/tests/data/expected/main_kr/target_python_version/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/main_kr/target_python_version/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/field-default-enum.py 0.34.0-1/tests/data/expected/parser/graphql/field-default-enum.py
--- 0.26.4-3/tests/data/expected/parser/graphql/field-default-enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/graphql/field-default-enum.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  field-default-enum.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, TypeAlias
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class Car(BaseModel):
+    colorList: Optional[List[Color]] = [Color.RED]
+    colorOne: Optional[Color] = Color.GREEN
+    typename__: Optional[Literal['Car']] = Field('Car', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/union-aliased-bug.py 0.34.0-1/tests/data/expected/parser/graphql/union-aliased-bug.py
--- 0.26.4-3/tests/data/expected/parser/graphql/union-aliased-bug.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/graphql/union-aliased-bug.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,45 @@
+# generated by datamodel-codegen:
+#   filename:  union-aliased-bug.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias, Union
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class GroupMetadata(BaseModel):
+    name: String
+    typename__: Optional[Literal['GroupMetadata']] = Field(
+        'GroupMetadata', alias='__typename'
+    )
+
+
+class Resource(BaseModel):
+    metadata: UserMetadata
+    typename__: Optional[Literal['Resource']] = Field('Resource', alias='__typename')
+
+
+class UserMetadata(BaseModel):
+    name: String
+    typename__: Optional[Literal['UserMetadata']] = Field(
+        'UserMetadata', alias='__typename'
+    )
+
+
+Metadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/union-commented.py 0.34.0-1/tests/data/expected/parser/graphql/union-commented.py
--- 0.26.4-3/tests/data/expected/parser/graphql/union-commented.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/graphql/union-commented.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,65 @@
+# generated by datamodel-codegen:
+#   filename:  union-commented.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, TypeAlias, Union
+
+from pydantic import BaseModel, Field
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class GroupMetadata(BaseModel):
+    """
+    This is a test comment in a single line
+    """
+
+    name: String
+    typename__: Optional[Literal['GroupMetadata']] = Field(
+        'GroupMetadata', alias='__typename'
+    )
+
+
+class Resource(BaseModel):
+    metadata: UserMetadata
+    typename__: Optional[Literal['Resource']] = Field('Resource', alias='__typename')
+
+
+class UserMetadata(BaseModel):
+    """
+    This is a multiline comment,
+    with a line break,
+    and a line break
+    """
+
+    name: String
+    typename__: Optional[Literal['UserMetadata']] = Field(
+        'UserMetadata', alias='__typename'
+    )
+
+
+# This is a single line comment
+DummyMetadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
+
+
+# This is another multiline comment,
+# with a line break,
+# and another line break
+Metadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,50 @@
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,55 @@
+from __future__ import annotations
+from typing import List, Optional
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,64 @@
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,67 @@
+from __future__ import annotations
+from typing import Dict, List, Optional
+from pydantic import BaseModel, Extra
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import Extra
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
+
+
+class Broken(Base):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(Base):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(Base):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(Base):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from datetime import date, datetime
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+from . import model_s
+
+
+class Pet(Enum):
+    ca_t = 'ca-t'
+    dog_ = 'dog*'
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class HomeAddress(BaseModel):
+    address_1: Optional[str] = Field(None, alias='address-1')
+
+
+class TeamMembers(BaseModel):
+    __root__: List[str]
+
+
+class AllOfObj(BaseModel):
+    name: Optional[str] = None
+    number: Optional[str] = None
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Result(BaseModel):
+    event: Optional[model_s.EvenT] = None
+
+
+class Source(BaseModel):
+    country_name: Optional[str] = Field(None, alias='country-name')
+
+
+class UserName(BaseModel):
+    first_name: Optional[str] = Field(None, alias='first-name')
+    home_address: Optional[HomeAddress] = Field(None, alias='home-address')
+
+
+class AllOfRef(UserName, HomeAddress):
+    pass
+
+
+class AllOfCombine(UserName):
+    birth_date: Optional[date] = Field(None, alias='birth-date')
+    size: Optional[conint(ge=1)] = None
+
+
+class AnyOfCombine(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class Item(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInObject(BaseModel):
+    item: Optional[Item] = None
+
+
+class AnyOfCombineInArrayItem(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInArray(BaseModel):
+    __root__: List[AnyOfCombineInArrayItem]
+
+
+class AnyOfCombineInRoot(HomeAddress, UserName):
+    age: Optional[str] = None
+    birth_date: Optional[datetime] = Field(None, alias='birth-date')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import model_s
+
+
+class PetS(BaseModel):
+    __root__: List[model_s.PeT]
+
+
+class UserS(BaseModel):
+    __root__: List[model_s.UseR]
+
+
+class RuleS(BaseModel):
+    __root__: List[str]
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class ApiS(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from .. import Id
+
+
+class TeA(BaseModel):
+    flavour_name: Optional[str] = Field(None, alias='flavour-name')
+    id: Optional[Id] = None
+
+
+class CocoA(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ThinG(BaseModel):
+    attribute_s: Optional[Dict[str, Any]] = Field(None, alias='attribute-s')
+
+
+class ThanG(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class ClonE(ThinG):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class SpecieS(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class PeT(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[SpecieS] = None
+
+
+class UseR(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class EvenT(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from .. import Source, fo_o
+
+
+class ChocolatE(BaseModel):
+    flavour_name: Optional[str] = Field(None, alias='flavour-name')
+    sourc_e: Optional[Source] = Field(None, alias='sourc-e')
+    coco_a: Optional[fo_o.CocoA] = Field(None, alias='coco-a')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import List, Optional
+
+from pydantic import BaseModel, conint
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    number: str
+
+
+class AllOfref(Pet, Car):
+    pass
+
+
+class AllOfobj(BaseModel):
+    name: Optional[str] = None
+    number: Optional[str] = None
+
+
+class AllOfCombine(Pet):
+    birthdate: Optional[date] = None
+    size: Optional[conint(ge=1)] = None
+
+
+class AnyOfCombine(Pet, Car):
+    age: Optional[str] = None
+
+
+class Item(Pet, Car):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInObject(BaseModel):
+    item: Optional[Item] = None
+
+
+class AnyOfCombineInArrayItem(Pet, Car):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInArray(BaseModel):
+    __root__: List[AnyOfCombineInArrayItem]
+
+
+class AnyOfCombineInRoot(Pet, Car):
+    age: Optional[str] = None
+    birthdate: Optional[datetime] = None
+
+
+class AnyOfCombineUnknownObjectInRoot(BaseModel):
+    __root__: List[Pet]
+
+
+class AnyOfCombineUnknownObjectInArray(Pet):
+    pass
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class AllOfNested3(AllOfCombine):
+    name: Optional[AnyOfCombine] = None
+
+
+class AllOfNested2(AllOfNested3):
+    name: Optional[AllOfNested1] = None
+
+
+class AllOfNested1(AllOfNested2):
+    name: Optional[AllOfCombine] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field
+
+
+class EmailMessage(BaseModel):
+    message: str = Field(..., description='The email message text.')
+    subject: str = Field(..., description='The subject line of the email.')
+    to: List[str] = Field(..., description='A list of email addresses.')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class FooBar(BaseModel):
+    id: Optional[int] = None
+
+
+class FooBarBaz(BaseModel):
+    id: Optional[int] = None
+
+
+class Foo(BaseModel):
+    foo_bar: Optional[FooBarBaz] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    bar: Optional[Any] = None
+    foo: str
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from datetime import date
+from typing import Dict, List, Optional, Union
+
+from pydantic import BaseModel, constr
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class AnyOfItem1(BaseModel):
+    name: Optional[str] = None
+
+
+class AnyOfItem(BaseModel):
+    __root__: Union[Pet, Car, AnyOfItem1, constr(max_length=5000)]
+
+
+class Item(BaseModel):
+    name: Optional[str] = None
+
+
+class AnyOfobj(BaseModel):
+    item: Optional[Union[Pet, Car, Item, constr(max_length=5000)]] = None
+
+
+class AnyOfArray1(BaseModel):
+    name: Optional[str] = None
+    birthday: Optional[date] = None
+
+
+class AnyOfArray(BaseModel):
+    __root__: List[Union[Pet, Car, AnyOfArray1, constr(max_length=5000)]]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Config(BaseModel):
+    setting: Optional[Dict[str, Union[str, List[str]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class EmailMessage(BaseModel):
+    bcc: Optional[List[str]] = Field(
+        None, description='A list of "blind carbon copy" email addresses.'
+    )
+    cc: Optional[List[str]] = Field(
+        None, description='A list of "carbon copy" email addresses.'
+    )
+    message: str = Field(..., description='The email message text.')
+    subject: str = Field(..., description='The subject line of the email.')
+    to: Optional[List[str]] = Field(None, description='A list of email addresses.')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Fields(BaseModel):
+    a: Optional[str] = None
+
+
+class Fields1(BaseModel):
+    b: Optional[str] = Field(None, regex='^[a-zA-Z_]+$')
+
+
+class BadSchema(BaseModel):
+    fields: Optional[List[Union[Fields, Fields1]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List
+
+from pydantic import BaseModel
+
+
+class Type1Enum(Enum):
+    enumOne = 'enumOne'
+    enumTwo = 'enumTwo'
+
+
+class Type1(BaseModel):
+    __root__: List[Type1Enum]
+
+
+class Type2(Enum):
+    enumFour = 'enumFour'
+    enumFive = 'enumFive'
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Events(BaseModel):
+    __root__: List[Event]
+
+
+class EventRoot(BaseModel):
+    __root__: Event
+
+
+class EventObject(BaseModel):
+    event: Optional[Event] = None
+
+
+class DuplicateObject1(BaseModel):
+    event: Optional[List[Event]] = None
+
+
+class Event1(BaseModel):
+    event: Optional[Event] = None
+
+
+class DuplicateObject2(BaseModel):
+    event: Optional[Event1] = None
+
+
+class DuplicateObject3(BaseModel):
+    __root__: Event
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class M(BaseModel):
+    name: Optional[str] = None
+
+
+class R(M):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Type(Enum):
+    animal = 'animal'
+
+
+class Number(Enum):
+    integer_1 = 1
+
+
+class Boolean(Enum):
+    boolean_True = True
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Type] = None
+    number: Number
+    boolean: Boolean
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type1(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type1] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnum(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(Enum):
+    pet = 'pet'
+
+
+class ArrayEnumEnum(Enum):
+    cat = 'cat'
+
+
+class ArrayEnumEnum1(Enum):
+    dog = 'dog'
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[ArrayEnumEnum, ArrayEnumEnum1]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import foo as foo_1
+from . import models
+from .nested import foo as foo_2
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[foo_1.Tea] = None
+    nested: Optional[foo_2.Tea] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Field(BaseModel):
+    __root__: str
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Id
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from .. import Id, OptionalModel
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import Source, bar, foo
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[foo.Cocoa] = None
+    field: Optional[bar.Field] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Type1(BaseModel):
+    prop: Optional[str] = None
+
+
+class Type2(BaseModel):
+    prop: Optional[str] = None
+
+
+class Container(BaseModel):
+    contents: List[Union[Type1, Type2]]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Type1(BaseModel):
+    prop: Optional[str] = None
+
+
+class Type2(BaseModel):
+    prop: Optional[str] = None
+
+
+class Container(BaseModel):
+    contents: List[Union[Type1, Type2]]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from datetime import date
+from typing import Dict, List, Optional, Union
+
+from pydantic import BaseModel, constr
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class OneOfItem1(BaseModel):
+    name: Optional[str] = None
+
+
+class OneOfItem(BaseModel):
+    __root__: Union[Pet, Car, OneOfItem1, constr(max_length=5000)]
+
+
+class Item(BaseModel):
+    name: Optional[str] = None
+
+
+class OneOfobj(BaseModel):
+    item: Optional[Union[Pet, Car, Item, constr(max_length=5000)]] = None
+
+
+class OneOfArray1(BaseModel):
+    name: Optional[str] = None
+    birthday: Optional[date] = None
+
+
+class OneOfArray(BaseModel):
+    __root__: List[Union[Pet, Car, OneOfArray1, constr(max_length=5000)]]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Config(BaseModel):
+    setting: Optional[Dict[str, Union[str, List[str]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, conint
+
+
+class Problem(BaseModel):
+    detail: Optional[str] = None
+    instance: Optional[AnyUrl] = None
+    status: Optional[conint(ge=100, lt=600)] = None
+    title: Optional[str] = None
+    type: Optional[AnyUrl] = 'about:blank'
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Optional[Type] = Field(None, description='Object type')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Resolved(BaseModel):
+    resolved: Optional[List[str]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsPetIdGetParametersQuery(BaseModel):
+    include: Optional[str] = None
+
+
+class Filter(BaseModel):
+    type: Optional[str] = None
+    color: Optional[str] = None
+
+
+class MediaType(Enum):
+    xml = 'xml'
+    json = 'json'
+
+
+class MultipleMediaFilter(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'xml'
+
+
+class MultipleMediaFilter1(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'json'
+
+
+class PetsGetParametersQuery(BaseModel):
+    limit: Optional[int] = 0
+    HomeAddress: Optional[str] = 'Unknown'
+    kind: Optional[str] = 'dog'
+    filter: Optional[Filter] = None
+    multipleMediaFilter: Optional[
+        Union[MultipleMediaFilter, MultipleMediaFilter1]
+    ] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class PetsPostRequest(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsPetIdGetParameters(BaseModel):
+    petId: str
+    include: Optional[str] = None
+
+
+class Filter(BaseModel):
+    type: Optional[str] = None
+    color: Optional[str] = None
+
+
+class MediaType(Enum):
+    xml = 'xml'
+    json = 'json'
+
+
+class MultipleMediaFilter(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'xml'
+
+
+class MultipleMediaFilter1(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'json'
+
+
+class PetsGetParameters(BaseModel):
+    limit: Optional[int] = 0
+    HomeAddress: Optional[str] = 'Unknown'
+    kind: Optional[str] = 'dog'
+    filter: Optional[Filter] = None
+    multipleMediaFilter: Optional[
+        Union[MultipleMediaFilter, MultipleMediaFilter1]
+    ] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class PetsPostRequest(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/graphql/additional-imports-types.json 0.34.0-1/tests/data/graphql/additional-imports-types.json
--- 0.26.4-3/tests/data/graphql/additional-imports-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/additional-imports-types.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "Date": {
+    "py_type": "date"
+  },
+  "DateTime": {
+    "py_type": "datetime"
+  },
+  "MyCustomClass": {
+    "py_type": "MyCustomPythonClass"
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/additional-imports.graphql 0.34.0-1/tests/data/graphql/additional-imports.graphql
--- 0.26.4-3/tests/data/graphql/additional-imports.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/additional-imports.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+scalar Date
+
+"DateTime (ISO8601, example: 2020-01-01T10:11:12+00:00)"
+scalar DateTime
+
+scalar MyCustomClass
+
+type A {
+  a: Date!
+  b: DateTime!
+  c: MyCustomClass!
+}
diff -pruN 0.26.4-3/tests/data/graphql/annotated.graphql 0.34.0-1/tests/data/graphql/annotated.graphql
--- 0.26.4-3/tests/data/graphql/annotated.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/annotated.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+type A {
+    field: String!
+    optionalField: String
+    listField: [String!]!
+    listOptionalField: [String]!
+    optionalListField: [String!]
+    optionalListOptionalField: [String]
+    listListField:[[String!]!]!
+}
diff -pruN 0.26.4-3/tests/data/graphql/custom-scalar-types.graphql 0.34.0-1/tests/data/graphql/custom-scalar-types.graphql
--- 0.26.4-3/tests/data/graphql/custom-scalar-types.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/custom-scalar-types.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+scalar Long
+
+type A {
+  id: ID!
+  duration: Long!
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/custom-scalar-types.json 0.34.0-1/tests/data/graphql/custom-scalar-types.json
--- 0.26.4-3/tests/data/graphql/custom-scalar-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/custom-scalar-types.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "Long": {
+    "py_type": "int"
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/different-types-of-fields.graphql 0.34.0-1/tests/data/graphql/different-types-of-fields.graphql
--- 0.26.4-3/tests/data/graphql/different-types-of-fields.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/different-types-of-fields.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+type A {
+    field: String!
+    optionalField: String
+    optionalListOptionalField: [String]
+    listOptionalField: [String]!
+    listField: [String!]!
+    optionalListOptionalListOptionalField:[[String]]
+    optionalListListOptionalField:[[String]!]
+    listListOptionalField:[[String]!]!
+    listOptionalListOptionalField:[[String]]!
+    optionalListOptionalListField:[[String!]]
+    optionalListListField:[[String!]!]
+    listListField:[[String!]!]!
+    listOptionalListField:[[String!]]!
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/enums.graphql 0.34.0-1/tests/data/graphql/enums.graphql
--- 0.26.4-3/tests/data/graphql/enums.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/enums.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+"Employee shift status"
+enum EmployeeShiftStatus {
+  "not on shift"
+  NOT_ON_SHIFT
+  "on shift"
+  ON_SHIFT
+}
+
+enum Color {
+  RED
+  GREEN
+  BLUE
+}
+
+enum EnumWithOneField {
+    FIELD
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/field-aliases.graphql 0.34.0-1/tests/data/graphql/field-aliases.graphql
--- 0.26.4-3/tests/data/graphql/field-aliases.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/field-aliases.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+scalar DateTime
+
+type DateTimePeriod {
+    from: DateTime!
+    to: DateTime!
+}
diff -pruN 0.26.4-3/tests/data/graphql/field-aliases.json 0.34.0-1/tests/data/graphql/field-aliases.json
--- 0.26.4-3/tests/data/graphql/field-aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/field-aliases.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+{
+  "to": "periodTo",
+  "from": "periodFrom"
+}
diff -pruN 0.26.4-3/tests/data/graphql/field-default-enum.graphql 0.34.0-1/tests/data/graphql/field-default-enum.graphql
--- 0.26.4-3/tests/data/graphql/field-default-enum.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/field-default-enum.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+enum Color {
+  RED
+  GREEN
+  BLUE
+}
+
+input Car {
+  colorList: [Color!] = [RED]
+  colorOne: Color = GREEN
+}
diff -pruN 0.26.4-3/tests/data/graphql/github-api-aliases.json 0.34.0-1/tests/data/graphql/github-api-aliases.json
--- 0.26.4-3/tests/data/graphql/github-api-aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/github-api-aliases.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+{
+  "fields": "fields_"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/github-api-types.json 0.34.0-1/tests/data/graphql/github-api-types.json
--- 0.26.4-3/tests/data/graphql/github-api-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/github-api-types.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "Date": {
+    "py_type": "date"
+  },
+  "DateTime": {
+    "py_type": "datetime"
+  },
+  "BigInt": {
+    "py_type": "int"
+  },
+  "PreciseDateTime": {
+    "py_type": "datetime"
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/github-api.graphql 0.34.0-1/tests/data/graphql/github-api.graphql
--- 0.26.4-3/tests/data/graphql/github-api.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/github-api.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61141 @@
+directive @requiredCapabilities(
+  requiredCapabilities: [String!]
+) on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION
+
+"""
+Marks an element of a GraphQL schema as only available via a preview header
+"""
+directive @preview(
+  """
+  The identifier of the API preview that toggles this field.
+  """
+  toggledBy: String!
+) on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION
+
+"""
+Defines what type of global IDs are accepted for a mutation argument of type ID.
+"""
+directive @possibleTypes(
+  """
+  Abstract type of accepted global ID
+  """
+  abstractType: String
+
+  """
+  Accepted types of global IDs.
+  """
+  concreteTypes: [String!]!
+) on INPUT_FIELD_DEFINITION
+
+"""
+Autogenerated input type of AbortQueuedMigrations
+"""
+input AbortQueuedMigrationsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that is running the migrations.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of AbortQueuedMigrations
+"""
+type AbortQueuedMigrationsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of AbortRepositoryMigration
+"""
+input AbortRepositoryMigrationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the migration to be aborted.
+  """
+  migrationId: ID! @possibleTypes(concreteTypes: ["RepositoryMigration"])
+}
+
+"""
+Autogenerated return type of AbortRepositoryMigration
+"""
+type AbortRepositoryMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of AcceptEnterpriseAdministratorInvitation
+"""
+input AcceptEnterpriseAdministratorInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the invitation being accepted
+  """
+  invitationId: ID! @possibleTypes(concreteTypes: ["EnterpriseAdministratorInvitation"])
+}
+
+"""
+Autogenerated return type of AcceptEnterpriseAdministratorInvitation
+"""
+type AcceptEnterpriseAdministratorInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The invitation that was accepted.
+  """
+  invitation: EnterpriseAdministratorInvitation
+
+  """
+  A message confirming the result of accepting an administrator invitation.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of AcceptTopicSuggestion
+"""
+input AcceptTopicSuggestionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the suggested topic.
+  """
+  name: String!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of AcceptTopicSuggestion
+"""
+type AcceptTopicSuggestionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The accepted topic.
+  """
+  topic: Topic
+}
+
+"""
+Represents an object which can take actions on GitHub. Typically a User or Bot.
+"""
+interface Actor {
+  """
+  A URL pointing to the actor's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTTP path for this actor.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this actor.
+  """
+  url: URI!
+}
+
+"""
+Location information for an actor
+"""
+type ActorLocation {
+  """
+  City
+  """
+  city: String
+
+  """
+  Country name
+  """
+  country: String
+
+  """
+  Country code
+  """
+  countryCode: String
+
+  """
+  Region name
+  """
+  region: String
+
+  """
+  Region or state code
+  """
+  regionCode: String
+}
+
+"""
+The actor's type.
+"""
+enum ActorType {
+  """
+  Indicates a team actor.
+  """
+  TEAM
+
+  """
+  Indicates a user actor.
+  """
+  USER
+}
+
+"""
+Autogenerated input type of AddAssigneesToAssignable
+"""
+input AddAssigneesToAssignableInput {
+  """
+  The id of the assignable object to add assignees to.
+  """
+  assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
+
+  """
+  The id of users to add as assignees.
+  """
+  assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of AddAssigneesToAssignable
+"""
+type AddAssigneesToAssignablePayload {
+  """
+  The item that was assigned.
+  """
+  assignable: Assignable
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of AddComment
+"""
+input AddCommentInput {
+  """
+  The contents of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+}
+
+"""
+Autogenerated return type of AddComment
+"""
+type AddCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The edge from the subject's comment connection.
+  """
+  commentEdge: IssueCommentEdge
+
+  """
+  The subject
+  """
+  subject: Node
+
+  """
+  The edge from the subject's timeline connection.
+  """
+  timelineEdge: IssueTimelineItemEdge
+}
+
+"""
+Autogenerated input type of AddDiscussionComment
+"""
+input AddDiscussionCommentInput {
+  """
+  The contents of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to comment on.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The Node ID of the discussion comment within this discussion to reply to.
+  """
+  replyToId: ID @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of AddDiscussionComment
+"""
+type AddDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created discussion comment.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of AddDiscussionPollVote
+"""
+input AddDiscussionPollVoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion poll option to vote for.
+  """
+  pollOptionId: ID! @possibleTypes(concreteTypes: ["DiscussionPollOption"])
+}
+
+"""
+Autogenerated return type of AddDiscussionPollVote
+"""
+type AddDiscussionPollVotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The poll option that a vote was added to.
+  """
+  pollOption: DiscussionPollOption
+}
+
+"""
+Autogenerated input type of AddEnterpriseOrganizationMember
+"""
+input AddEnterpriseOrganizationMemberInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise which owns the organization.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization the users will be added to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The role to assign the users in the organization
+  """
+  role: OrganizationMemberRole
+
+  """
+  The IDs of the enterprise members to add.
+  """
+  userIds: [ID!]!
+}
+
+"""
+Autogenerated return type of AddEnterpriseOrganizationMember
+"""
+type AddEnterpriseOrganizationMemberPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The users who were added to the organization.
+  """
+  users: [User!]
+}
+
+"""
+Autogenerated input type of AddEnterpriseSupportEntitlement
+"""
+input AddEnterpriseSupportEntitlementInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a member who will receive the support entitlement.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of AddEnterpriseSupportEntitlement
+"""
+type AddEnterpriseSupportEntitlementPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of adding the support entitlement.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of AddLabelsToLabelable
+"""
+input AddLabelsToLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ids of the labels to add.
+  """
+  labelIds: [ID!]! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The id of the labelable object to add labels to.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of AddLabelsToLabelable
+"""
+type AddLabelsToLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was labeled.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of AddProjectCard
+"""
+input AddProjectCardInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The content of the card. Must be a member of the ProjectCardItem union
+  """
+  contentId: ID @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "ProjectCardItem")
+
+  """
+  The note on the card.
+  """
+  note: String
+
+  """
+  The Node ID of the ProjectColumn.
+  """
+  projectColumnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of AddProjectCard
+"""
+type AddProjectCardPayload {
+  """
+  The edge from the ProjectColumn's card connection.
+  """
+  cardEdge: ProjectCardEdge
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ProjectColumn
+  """
+  projectColumn: ProjectColumn
+}
+
+"""
+Autogenerated input type of AddProjectColumn
+"""
+input AddProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the column.
+  """
+  name: String!
+
+  """
+  The Node ID of the project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+}
+
+"""
+Autogenerated return type of AddProjectColumn
+"""
+type AddProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The edge from the project's column connection.
+  """
+  columnEdge: ProjectColumnEdge
+
+  """
+  The project
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of AddProjectV2DraftIssue
+"""
+input AddProjectV2DraftIssueInput {
+  """
+  The IDs of the assignees of the draft issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body of the draft issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to add the draft issue to.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The title of the draft issue. A project item can also be created by providing
+  the URL of an Issue or Pull Request if you have access.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of AddProjectV2DraftIssue
+"""
+type AddProjectV2DraftIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The draft issue added to the project.
+  """
+  projectItem: ProjectV2Item
+}
+
+"""
+Autogenerated input type of AddProjectV2ItemById
+"""
+input AddProjectV2ItemByIdInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the Issue or Pull Request to add.
+  """
+  contentId: ID!
+    @possibleTypes(concreteTypes: ["DraftIssue", "Issue", "PullRequest"], abstractType: "ProjectV2ItemContent")
+
+  """
+  The ID of the Project to add the item to.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of AddProjectV2ItemById
+"""
+type AddProjectV2ItemByIdPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item added to the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewComment
+"""
+input AddPullRequestReviewCommentInput {
+  """
+  The text of the comment. This field is required
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `body` will be removed. use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The SHA of the commit to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `commitOID` will be removed. use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  commitOID: GitObjectID
+
+  """
+  The comment id to reply to.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `inReplyTo` will be removed. use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  inReplyTo: ID @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+
+  """
+  The relative path of the file to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `path` will be removed. use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  path: String
+
+  """
+  The line index in the diff to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `position` will be removed. use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  position: Int
+
+  """
+  The node ID of the pull request reviewing
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `pullRequestId` will be removed. use
+  addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node ID of the review to modify.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `pullRequestReviewId` will be removed. use
+  addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewComment
+"""
+type AddPullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created comment.
+  """
+  comment: PullRequestReviewComment
+
+  """
+  The edge from the review's comment connection.
+  """
+  commentEdge: PullRequestReviewCommentEdge
+}
+
+"""
+Autogenerated input type of AddPullRequestReview
+"""
+input AddPullRequestReviewInput {
+  """
+  The contents of the review body comment.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The review line comments.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `comments` will be removed. use the `threads` argument instead
+  **Reason:** We are deprecating comment fields that use diff-relative positioning
+  """
+  comments: [DraftPullRequestReviewComment]
+
+  """
+  The commit OID the review pertains to.
+  """
+  commitOID: GitObjectID
+
+  """
+  The event to perform on the pull request review.
+  """
+  event: PullRequestReviewEvent
+
+  """
+  The Node ID of the pull request to modify.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The review line comment threads.
+  """
+  threads: [DraftPullRequestReviewThread]
+}
+
+"""
+Autogenerated return type of AddPullRequestReview
+"""
+type AddPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created pull request review.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  The edge from the pull request's review connection.
+  """
+  reviewEdge: PullRequestReviewEdge
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewThread
+"""
+input AddPullRequestReviewThreadInput {
+  """
+  Body of the thread's first comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The line of the blob to which the thread refers, required for line-level
+  threads. The end of the line range for multi-line comments.
+  """
+  line: Int
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  The node ID of the pull request reviewing
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node ID of the review to modify.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+
+  """
+  The side of the diff on which the line resides. For multi-line comments, this is the side for the end of the line range.
+  """
+  side: DiffSide = RIGHT
+
+  """
+  The first line of the range to which the comment refers.
+  """
+  startLine: Int
+
+  """
+  The side of the diff on which the start line resides.
+  """
+  startSide: DiffSide = RIGHT
+
+  """
+  The level at which the comments in the corresponding thread are targeted, can be a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType = LINE
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewThread
+"""
+type AddPullRequestReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created thread.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewThreadReply
+"""
+input AddPullRequestReviewThreadReplyInput {
+  """
+  The text of the reply.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pending review to which the reply will belong.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+
+  """
+  The Node ID of the thread to which this reply is being written.
+  """
+  pullRequestReviewThreadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewThreadReply
+"""
+type AddPullRequestReviewThreadReplyPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created reply.
+  """
+  comment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of AddReaction
+"""
+input AddReactionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the emoji to react with.
+  """
+  content: ReactionContent!
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "Discussion"
+        "DiscussionComment"
+        "Issue"
+        "IssueComment"
+        "PullRequest"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+        "Release"
+        "TeamDiscussion"
+        "TeamDiscussionComment"
+      ]
+      abstractType: "Reactable"
+    )
+}
+
+"""
+Autogenerated return type of AddReaction
+"""
+type AddReactionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reaction object.
+  """
+  reaction: Reaction
+
+  """
+  The reaction groups for the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  The reactable subject.
+  """
+  subject: Reactable
+}
+
+"""
+Autogenerated input type of AddStar
+"""
+input AddStarInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Starrable ID to star.
+  """
+  starrableId: ID! @possibleTypes(concreteTypes: ["Gist", "Repository", "Topic"], abstractType: "Starrable")
+}
+
+"""
+Autogenerated return type of AddStar
+"""
+type AddStarPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The starrable.
+  """
+  starrable: Starrable
+}
+
+"""
+Autogenerated input type of AddUpvote
+"""
+input AddUpvoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion or comment to upvote.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Discussion", "DiscussionComment"], abstractType: "Votable")
+}
+
+"""
+Autogenerated return type of AddUpvote
+"""
+type AddUpvotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The votable subject.
+  """
+  subject: Votable
+}
+
+"""
+Autogenerated input type of AddVerifiableDomain
+"""
+input AddVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The URL of the domain
+  """
+  domain: URI!
+
+  """
+  The ID of the owner to add the domain to
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Enterprise", "Organization"], abstractType: "VerifiableDomainOwner")
+}
+
+"""
+Autogenerated return type of AddVerifiableDomain
+"""
+type AddVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was added.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+Represents an 'added_to_merge_queue' event on a given pull request.
+"""
+type AddedToMergeQueueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who added this Pull Request to the merge queue
+  """
+  enqueuer: User
+
+  """
+  The Node ID of the AddedToMergeQueueEvent object
+  """
+  id: ID!
+
+  """
+  The merge queue where this pull request was added to.
+  """
+  mergeQueue: MergeQueue
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'added_to_project' event on a given issue or pull request.
+"""
+type AddedToProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the AddedToProjectEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+Represents an announcement banner.
+"""
+interface AnnouncementBanner {
+  """
+  The text of the announcement
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user
+  """
+  announcementUserDismissible: Boolean
+}
+
+"""
+A GitHub App.
+"""
+type App implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the app.
+  """
+  description: String
+
+  """
+  The Node ID of the App object
+  """
+  id: ID!
+
+  """
+  The IP addresses of the app.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The hex color code, without the leading '#', for the logo background.
+  """
+  logoBackgroundColor: String!
+
+  """
+  A URL pointing to the app's logo.
+  """
+  logoUrl(
+    """
+    The size of the resulting image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The name of the app.
+  """
+  name: String!
+
+  """
+  A slug based on the name of the app for use in URLs.
+  """
+  slug: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The URL to the app's homepage.
+  """
+  url: URI!
+}
+
+"""
+Autogenerated input type of ApproveDeployments
+"""
+input ApproveDeploymentsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Optional comment for approving deployments
+  """
+  comment: String = ""
+
+  """
+  The ids of environments to reject deployments
+  """
+  environmentIds: [ID!]!
+
+  """
+  The node ID of the workflow run containing the pending deployments.
+  """
+  workflowRunId: ID! @possibleTypes(concreteTypes: ["WorkflowRun"])
+}
+
+"""
+Autogenerated return type of ApproveDeployments
+"""
+type ApproveDeploymentsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The affected deployments.
+  """
+  deployments: [Deployment!]
+}
+
+"""
+Autogenerated input type of ApproveVerifiableDomain
+"""
+input ApproveVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to approve.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of ApproveVerifiableDomain
+"""
+type ApproveVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was approved.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+Autogenerated input type of ArchiveProjectV2Item
+"""
+input ArchiveProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the ProjectV2Item to archive.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project to archive the item from.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of ArchiveProjectV2Item
+"""
+type ArchiveProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item archived from the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of ArchiveRepository
+"""
+input ArchiveRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to mark as archived.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of ArchiveRepository
+"""
+type ArchiveRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was marked as archived.
+  """
+  repository: Repository
+}
+
+"""
+An object that can have users assigned to it.
+"""
+interface Assignable {
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+}
+
+"""
+Represents an 'assigned' event on any assignable object.
+"""
+type AssignedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the assignable associated with the event.
+  """
+  assignable: Assignable!
+
+  """
+  Identifies the user or mannequin that was assigned.
+  """
+  assignee: Assignee
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AssignedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the user who was assigned.
+  """
+  user: User
+    @deprecated(reason: "Assignees can now be mannequins. Use the `assignee` field instead. Removal on 2020-01-01 UTC.")
+}
+
+"""
+Types that can be assigned to issues.
+"""
+union Assignee = Bot | Mannequin | Organization | User
+
+"""
+An entry in the audit log.
+"""
+interface AuditEntry {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Types that can initiate an audit log event.
+"""
+union AuditEntryActor = Bot | Organization | User
+
+"""
+Ordering options for Audit Log connections.
+"""
+input AuditLogOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection
+
+  """
+  The field to order Audit Logs by.
+  """
+  field: AuditLogOrderField
+}
+
+"""
+Properties by which Audit Log connections can be ordered.
+"""
+enum AuditLogOrderField {
+  """
+  Order audit log entries by timestamp
+  """
+  CREATED_AT
+}
+
+"""
+Represents a 'auto_merge_disabled' event on a given pull request.
+"""
+type AutoMergeDisabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who disabled auto-merge for this Pull Request
+  """
+  disabler: User
+
+  """
+  The Node ID of the AutoMergeDisabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event
+  """
+  pullRequest: PullRequest
+
+  """
+  The reason auto-merge was disabled
+  """
+  reason: String
+
+  """
+  The reason_code relating to why auto-merge was disabled
+  """
+  reasonCode: String
+}
+
+"""
+Represents a 'auto_merge_enabled' event on a given pull request.
+"""
+type AutoMergeEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoMergeEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents an auto-merge request for a pull request
+"""
+type AutoMergeRequest {
+  """
+  The email address of the author of this auto-merge request.
+  """
+  authorEmail: String
+
+  """
+  The commit message of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging.
+  """
+  commitBody: String
+
+  """
+  The commit title of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging
+  """
+  commitHeadline: String
+
+  """
+  When was this auto-merge request was enabled.
+  """
+  enabledAt: DateTime
+
+  """
+  The actor who created the auto-merge request.
+  """
+  enabledBy: Actor
+
+  """
+  The merge method of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging.
+  """
+  mergeMethod: PullRequestMergeMethod!
+
+  """
+  The pull request that this auto-merge request is set against.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'auto_rebase_enabled' event on a given pull request.
+"""
+type AutoRebaseEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge (rebase) for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoRebaseEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'auto_squash_enabled' event on a given pull request.
+"""
+type AutoSquashEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge (squash) for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoSquashEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'automatic_base_change_failed' event on a given pull request.
+"""
+type AutomaticBaseChangeFailedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AutomaticBaseChangeFailedEvent object
+  """
+  id: ID!
+
+  """
+  The new base for this PR
+  """
+  newBase: String!
+
+  """
+  The old base for this PR
+  """
+  oldBase: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'automatic_base_change_succeeded' event on a given pull request.
+"""
+type AutomaticBaseChangeSucceededEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AutomaticBaseChangeSucceededEvent object
+  """
+  id: ID!
+
+  """
+  The new base for this PR
+  """
+  newBase: String!
+
+  """
+  The old base for this PR
+  """
+  oldBase: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+A (potentially binary) string encoded using base64.
+"""
+scalar Base64String
+
+"""
+Represents a 'base_ref_changed' event on a given issue or pull request.
+"""
+type BaseRefChangedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the name of the base ref for the pull request after it was changed.
+  """
+  currentRefName: String!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the BaseRefChangedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the name of the base ref for the pull request before it was changed.
+  """
+  previousRefName: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'base_ref_deleted' event on a given pull request.
+"""
+type BaseRefDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the name of the Ref associated with the `base_ref_deleted` event.
+  """
+  baseRefName: String
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the BaseRefDeletedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'base_ref_force_pushed' event on a given pull request.
+"""
+type BaseRefForcePushedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the after commit SHA for the 'base_ref_force_pushed' event.
+  """
+  afterCommit: Commit
+
+  """
+  Identifies the before commit SHA for the 'base_ref_force_pushed' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the BaseRefForcePushedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the fully qualified ref name for the 'base_ref_force_pushed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents non-fractional signed whole numeric values. Since the value may
+exceed the size of a 32-bit integer, it's encoded as a string.
+"""
+scalar BigInt
+
+"""
+Represents a Git blame.
+"""
+type Blame {
+  """
+  The list of ranges from a Git blame.
+  """
+  ranges: [BlameRange!]!
+}
+
+"""
+Represents a range of information from a Git blame.
+"""
+type BlameRange {
+  """
+  Identifies the recency of the change, from 1 (new) to 10 (old). This is
+  calculated as a 2-quantile and determines the length of distance between the
+  median age of all the changes in the file and the recency of the current
+  range's change.
+  """
+  age: Int!
+
+  """
+  Identifies the line author
+  """
+  commit: Commit!
+
+  """
+  The ending line for the range
+  """
+  endingLine: Int!
+
+  """
+  The starting line for the range
+  """
+  startingLine: Int!
+}
+
+"""
+Represents a Git blob.
+"""
+type Blob implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  Byte size of Blob object
+  """
+  byteSize: Int!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the Blob object
+  """
+  id: ID!
+
+  """
+  Indicates whether the Blob is binary or text. Returns null if unable to determine the encoding.
+  """
+  isBinary: Boolean
+
+  """
+  Indicates whether the contents is truncated
+  """
+  isTruncated: Boolean!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+
+  """
+  UTF8 text data or null if the Blob is binary
+  """
+  text: String
+}
+
+"""
+A special type of user which takes actions on behalf of GitHub Apps.
+"""
+type Bot implements Actor & Node & UniformResourceLocatable {
+  """
+  A URL pointing to the GitHub App's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Bot object
+  """
+  id: ID!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTTP path for this bot
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this bot
+  """
+  url: URI!
+}
+
+"""
+Types which can be actors for `BranchActorAllowance` objects.
+"""
+union BranchActorAllowanceActor = App | Team | User
+
+"""
+Parameters to be used for the branch_name_pattern rule
+"""
+type BranchNamePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the branch_name_pattern rule
+"""
+input BranchNamePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A branch protection rule.
+"""
+type BranchProtectionRule implements Node {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean!
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean!
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean!
+
+  """
+  A list of conflicts matching branches protection rule and other branch protection rules
+  """
+  branchProtectionRuleConflicts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BranchProtectionRuleConflictConnection!
+
+  """
+  A list of actors able to force push for this branch protection rule.
+  """
+  bypassForcePushAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BypassForcePushAllowanceConnection!
+
+  """
+  A list of actors able to bypass PRs for this branch protection rule.
+  """
+  bypassPullRequestAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BypassPullRequestAllowanceConnection!
+
+  """
+  The actor who created this branch protection rule.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean!
+
+  """
+  The Node ID of the BranchProtectionRule object
+  """
+  id: ID!
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean!
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean!
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean!
+
+  """
+  Repository refs that are protected by this rule
+  """
+  matchingRefs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters refs with query on name
+    """
+    query: String
+  ): RefConnection!
+
+  """
+  Identifies the protection rule pattern.
+  """
+  pattern: String!
+
+  """
+  A list push allowances for this branch protection rule.
+  """
+  pushAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PushAllowanceConnection!
+
+  """
+  The repository associated with this branch protection rule.
+  """
+  repository: Repository
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  List of required deployment environments that must be deployed successfully to update matching branches
+  """
+  requiredDeploymentEnvironments: [String]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String]
+
+  """
+  List of required status checks that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusChecks: [RequiredStatusCheckDescription!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean!
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean!
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean!
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean!
+
+  """
+  Does this branch require deployment to specific environments before merging
+  """
+  requiresDeployments: Boolean!
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean!
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean!
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean!
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean!
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean!
+
+  """
+  A list review dismissal allowances for this branch protection rule.
+  """
+  reviewDismissalAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReviewDismissalAllowanceConnection!
+}
+
+"""
+A conflict between two branch protection rules.
+"""
+type BranchProtectionRuleConflict {
+  """
+  Identifies the branch protection rule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  Identifies the conflicting branch protection rule.
+  """
+  conflictingBranchProtectionRule: BranchProtectionRule
+
+  """
+  Identifies the branch ref that has conflicting rules
+  """
+  ref: Ref
+}
+
+"""
+The connection type for BranchProtectionRuleConflict.
+"""
+type BranchProtectionRuleConflictConnection {
+  """
+  A list of edges.
+  """
+  edges: [BranchProtectionRuleConflictEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BranchProtectionRuleConflict]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BranchProtectionRuleConflictEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BranchProtectionRuleConflict
+}
+
+"""
+The connection type for BranchProtectionRule.
+"""
+type BranchProtectionRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [BranchProtectionRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BranchProtectionRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BranchProtectionRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BranchProtectionRule
+}
+
+"""
+Information about a sponsorship to make for a user or organization with a GitHub
+Sponsors profile, as part of sponsoring many users or organizations at once.
+"""
+input BulkSponsorship {
+  """
+  The amount to pay to the sponsorable in US dollars. Valid values: 1-12000.
+  """
+  amount: Int!
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Types that can represent a repository ruleset bypass actor.
+"""
+union BypassActor = App | Team
+
+"""
+A user, team, or app who has the ability to bypass a force push requirement on a protected branch.
+"""
+type BypassForcePushAllowance implements Node {
+  """
+  The actor that can force push.
+  """
+  actor: BranchActorAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the BypassForcePushAllowance object
+  """
+  id: ID!
+}
+
+"""
+The connection type for BypassForcePushAllowance.
+"""
+type BypassForcePushAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [BypassForcePushAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BypassForcePushAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BypassForcePushAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BypassForcePushAllowance
+}
+
+"""
+A user, team, or app who has the ability to bypass a pull request requirement on a protected branch.
+"""
+type BypassPullRequestAllowance implements Node {
+  """
+  The actor that can bypass.
+  """
+  actor: BranchActorAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the BypassPullRequestAllowance object
+  """
+  id: ID!
+}
+
+"""
+The connection type for BypassPullRequestAllowance.
+"""
+type BypassPullRequestAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [BypassPullRequestAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BypassPullRequestAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BypassPullRequestAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BypassPullRequestAllowance
+}
+
+"""
+The Common Vulnerability Scoring System
+"""
+type CVSS {
+  """
+  The CVSS score associated with this advisory
+  """
+  score: Float!
+
+  """
+  The CVSS vector string associated with this advisory
+  """
+  vectorString: String
+}
+
+"""
+A common weakness enumeration
+"""
+type CWE implements Node {
+  """
+  The id of the CWE
+  """
+  cweId: String!
+
+  """
+  A detailed description of this CWE
+  """
+  description: String!
+
+  """
+  The Node ID of the CWE object
+  """
+  id: ID!
+
+  """
+  The name of this CWE
+  """
+  name: String!
+}
+
+"""
+The connection type for CWE.
+"""
+type CWEConnection {
+  """
+  A list of edges.
+  """
+  edges: [CWEEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CWE]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CWEEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CWE
+}
+
+"""
+Autogenerated input type of CancelEnterpriseAdminInvitation
+"""
+input CancelEnterpriseAdminInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pending enterprise administrator invitation.
+  """
+  invitationId: ID! @possibleTypes(concreteTypes: ["EnterpriseAdministratorInvitation"])
+}
+
+"""
+Autogenerated return type of CancelEnterpriseAdminInvitation
+"""
+type CancelEnterpriseAdminInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The invitation that was canceled.
+  """
+  invitation: EnterpriseAdministratorInvitation
+
+  """
+  A message confirming the result of canceling an administrator invitation.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of CancelSponsorship
+"""
+input CancelSponsorshipInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of CancelSponsorship
+"""
+type CancelSponsorshipPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was being used at the time of cancellation.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+Autogenerated input type of ChangeUserStatus
+"""
+input ChangeUserStatusInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The emoji to represent your status. Can either be a native Unicode emoji or an emoji name with colons, e.g., :grinning:.
+  """
+  emoji: String
+
+  """
+  If set, the user status will not be shown after this date.
+  """
+  expiresAt: DateTime
+
+  """
+  Whether this status should indicate you are not fully available on GitHub, e.g., you are away.
+  """
+  limitedAvailability: Boolean = false
+
+  """
+  A short description of your current status.
+  """
+  message: String
+
+  """
+  The ID of the organization whose members will be allowed to see the status. If
+  omitted, the status will be publicly visible.
+  """
+  organizationId: ID @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of ChangeUserStatus
+"""
+type ChangeUserStatusPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Your updated status.
+  """
+  status: UserStatus
+}
+
+"""
+A single check annotation.
+"""
+type CheckAnnotation {
+  """
+  The annotation's severity level.
+  """
+  annotationLevel: CheckAnnotationLevel
+
+  """
+  The path to the file that this annotation was made on.
+  """
+  blobUrl: URI!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The position of this annotation.
+  """
+  location: CheckAnnotationSpan!
+
+  """
+  The annotation's message.
+  """
+  message: String!
+
+  """
+  The path that this annotation was made on.
+  """
+  path: String!
+
+  """
+  Additional information about the annotation.
+  """
+  rawDetails: String
+
+  """
+  The annotation's title
+  """
+  title: String
+}
+
+"""
+The connection type for CheckAnnotation.
+"""
+type CheckAnnotationConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckAnnotationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckAnnotation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Information from a check run analysis to specific lines of code.
+"""
+input CheckAnnotationData {
+  """
+  Represents an annotation's information level
+  """
+  annotationLevel: CheckAnnotationLevel!
+
+  """
+  The location of the annotation
+  """
+  location: CheckAnnotationRange!
+
+  """
+  A short description of the feedback for these lines of code.
+  """
+  message: String!
+
+  """
+  The path of the file to add an annotation to.
+  """
+  path: String!
+
+  """
+  Details about this annotation.
+  """
+  rawDetails: String
+
+  """
+  The title that represents the annotation.
+  """
+  title: String
+}
+
+"""
+An edge in a connection.
+"""
+type CheckAnnotationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckAnnotation
+}
+
+"""
+Represents an annotation's information level.
+"""
+enum CheckAnnotationLevel {
+  """
+  An annotation indicating an inescapable error.
+  """
+  FAILURE
+
+  """
+  An annotation indicating some information.
+  """
+  NOTICE
+
+  """
+  An annotation indicating an ignorable error.
+  """
+  WARNING
+}
+
+"""
+A character position in a check annotation.
+"""
+type CheckAnnotationPosition {
+  """
+  Column number (1 indexed).
+  """
+  column: Int
+
+  """
+  Line number (1 indexed).
+  """
+  line: Int!
+}
+
+"""
+Information from a check run analysis to specific lines of code.
+"""
+input CheckAnnotationRange {
+  """
+  The ending column of the range.
+  """
+  endColumn: Int
+
+  """
+  The ending line of the range.
+  """
+  endLine: Int!
+
+  """
+  The starting column of the range.
+  """
+  startColumn: Int
+
+  """
+  The starting line of the range.
+  """
+  startLine: Int!
+}
+
+"""
+An inclusive pair of positions for a check annotation.
+"""
+type CheckAnnotationSpan {
+  """
+  End position (inclusive).
+  """
+  end: CheckAnnotationPosition!
+
+  """
+  Start position (inclusive).
+  """
+  start: CheckAnnotationPosition!
+}
+
+"""
+The possible states for a check suite or run conclusion.
+"""
+enum CheckConclusionState {
+  """
+  The check suite or run requires action.
+  """
+  ACTION_REQUIRED
+
+  """
+  The check suite or run has been cancelled.
+  """
+  CANCELLED
+
+  """
+  The check suite or run has failed.
+  """
+  FAILURE
+
+  """
+  The check suite or run was neutral.
+  """
+  NEUTRAL
+
+  """
+  The check suite or run was skipped.
+  """
+  SKIPPED
+
+  """
+  The check suite or run was marked stale by GitHub. Only GitHub can use this conclusion.
+  """
+  STALE
+
+  """
+  The check suite or run has failed at startup.
+  """
+  STARTUP_FAILURE
+
+  """
+  The check suite or run has succeeded.
+  """
+  SUCCESS
+
+  """
+  The check suite or run has timed out.
+  """
+  TIMED_OUT
+}
+
+"""
+A check run.
+"""
+type CheckRun implements Node & RequirableByPullRequest & UniformResourceLocatable {
+  """
+  The check run's annotations
+  """
+  annotations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckAnnotationConnection
+
+  """
+  The check suite that this run is a part of.
+  """
+  checkSuite: CheckSuite!
+
+  """
+  Identifies the date and time when the check run was completed.
+  """
+  completedAt: DateTime
+
+  """
+  The conclusion of the check run.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The corresponding deployment for this job, if any
+  """
+  deployment: Deployment
+
+  """
+  The URL from which to find full details of the check run on the integrator's site.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the check run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The Node ID of the CheckRun object
+  """
+  id: ID!
+
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+
+  """
+  The name of the check for this check run.
+  """
+  name: String!
+
+  """
+  Information about a pending deployment, if any, in this check run
+  """
+  pendingDeploymentRequest: DeploymentRequest
+
+  """
+  The permalink to the check run summary.
+  """
+  permalink: URI!
+
+  """
+  The repository associated with this check run.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this check run.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the check run was started.
+  """
+  startedAt: DateTime
+
+  """
+  The current status of the check run.
+  """
+  status: CheckStatusState!
+
+  """
+  The check run's steps
+  """
+  steps(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Step number
+    """
+    number: Int
+  ): CheckStepConnection
+
+  """
+  A string representing the check run's summary
+  """
+  summary: String
+
+  """
+  A string representing the check run's text
+  """
+  text: String
+
+  """
+  A string representing the check run
+  """
+  title: String
+
+  """
+  The HTTP URL for this check run.
+  """
+  url: URI!
+}
+
+"""
+Possible further actions the integrator can perform.
+"""
+input CheckRunAction {
+  """
+  A short explanation of what this action would do.
+  """
+  description: String!
+
+  """
+  A reference for the action on the integrator's system.
+  """
+  identifier: String!
+
+  """
+  The text to be displayed on a button in the web UI.
+  """
+  label: String!
+}
+
+"""
+The connection type for CheckRun.
+"""
+type CheckRunConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckRunEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckRun]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckRunEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckRun
+}
+
+"""
+The filters that are available when fetching check runs.
+"""
+input CheckRunFilter {
+  """
+  Filters the check runs created by this application ID.
+  """
+  appId: Int
+
+  """
+  Filters the check runs by this name.
+  """
+  checkName: String
+
+  """
+  Filters the check runs by this type.
+  """
+  checkType: CheckRunType
+
+  """
+  Filters the check runs by these conclusions.
+  """
+  conclusions: [CheckConclusionState!]
+
+  """
+  Filters the check runs by this status. Superseded by statuses.
+  """
+  status: CheckStatusState
+
+  """
+  Filters the check runs by this status. Overrides status.
+  """
+  statuses: [CheckStatusState!]
+}
+
+"""
+Descriptive details about the check run.
+"""
+input CheckRunOutput {
+  """
+  The annotations that are made as part of the check run.
+  """
+  annotations: [CheckAnnotationData!]
+
+  """
+  Images attached to the check run output displayed in the GitHub pull request UI.
+  """
+  images: [CheckRunOutputImage!]
+
+  """
+  The summary of the check run (supports Commonmark).
+  """
+  summary: String!
+
+  """
+  The details of the check run (supports Commonmark).
+  """
+  text: String
+
+  """
+  A title to provide for this check run.
+  """
+  title: String!
+}
+
+"""
+Images attached to the check run output displayed in the GitHub pull request UI.
+"""
+input CheckRunOutputImage {
+  """
+  The alternative text for the image.
+  """
+  alt: String!
+
+  """
+  A short image description.
+  """
+  caption: String
+
+  """
+  The full URL of the image.
+  """
+  imageUrl: URI!
+}
+
+"""
+The possible states of a check run in a status rollup.
+"""
+enum CheckRunState {
+  """
+  The check run requires action.
+  """
+  ACTION_REQUIRED
+
+  """
+  The check run has been cancelled.
+  """
+  CANCELLED
+
+  """
+  The check run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check run has failed.
+  """
+  FAILURE
+
+  """
+  The check run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check run was neutral.
+  """
+  NEUTRAL
+
+  """
+  The check run is in pending state.
+  """
+  PENDING
+
+  """
+  The check run has been queued.
+  """
+  QUEUED
+
+  """
+  The check run was skipped.
+  """
+  SKIPPED
+
+  """
+  The check run was marked stale by GitHub. Only GitHub can use this conclusion.
+  """
+  STALE
+
+  """
+  The check run has failed at startup.
+  """
+  STARTUP_FAILURE
+
+  """
+  The check run has succeeded.
+  """
+  SUCCESS
+
+  """
+  The check run has timed out.
+  """
+  TIMED_OUT
+
+  """
+  The check run is in waiting state.
+  """
+  WAITING
+}
+
+"""
+Represents a count of the state of a check run.
+"""
+type CheckRunStateCount {
+  """
+  The number of check runs with this state.
+  """
+  count: Int!
+
+  """
+  The state of a check run.
+  """
+  state: CheckRunState!
+}
+
+"""
+The possible types of check runs.
+"""
+enum CheckRunType {
+  """
+  Every check run available.
+  """
+  ALL
+
+  """
+  The latest check run.
+  """
+  LATEST
+}
+
+"""
+The possible states for a check suite or run status.
+"""
+enum CheckStatusState {
+  """
+  The check suite or run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check suite or run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check suite or run is in pending state.
+  """
+  PENDING
+
+  """
+  The check suite or run has been queued.
+  """
+  QUEUED
+
+  """
+  The check suite or run has been requested.
+  """
+  REQUESTED
+
+  """
+  The check suite or run is in waiting state.
+  """
+  WAITING
+}
+
+"""
+A single check step.
+"""
+type CheckStep {
+  """
+  Identifies the date and time when the check step was completed.
+  """
+  completedAt: DateTime
+
+  """
+  The conclusion of the check step.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  A reference for the check step on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The step's name.
+  """
+  name: String!
+
+  """
+  The index of the step in the list of steps of the parent check run.
+  """
+  number: Int!
+
+  """
+  Number of seconds to completion.
+  """
+  secondsToCompletion: Int
+
+  """
+  Identifies the date and time when the check step was started.
+  """
+  startedAt: DateTime
+
+  """
+  The current status of the check step.
+  """
+  status: CheckStatusState!
+}
+
+"""
+The connection type for CheckStep.
+"""
+type CheckStepConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckStepEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckStep]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckStepEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckStep
+}
+
+"""
+A check suite.
+"""
+type CheckSuite implements Node {
+  """
+  The GitHub App which created this check suite.
+  """
+  app: App
+
+  """
+  The name of the branch for this check suite.
+  """
+  branch: Ref
+
+  """
+  The check runs associated with a check suite.
+  """
+  checkRuns(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filters the check runs by this type.
+    """
+    filterBy: CheckRunFilter
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckRunConnection
+
+  """
+  The commit for this check suite
+  """
+  commit: Commit!
+
+  """
+  The conclusion of this check suite.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who triggered the check suite.
+  """
+  creator: User
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the CheckSuite object
+  """
+  id: ID!
+
+  """
+  A list of open pull requests matching the check suite.
+  """
+  matchingPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection
+
+  """
+  The push that triggered this check suite.
+  """
+  push: Push
+
+  """
+  The repository associated with this check suite.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this check suite
+  """
+  resourcePath: URI!
+
+  """
+  The status of this check suite.
+  """
+  status: CheckStatusState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this check suite
+  """
+  url: URI!
+
+  """
+  The workflow run associated with this check suite.
+  """
+  workflowRun: WorkflowRun
+}
+
+"""
+The auto-trigger preferences that are available for check suites.
+"""
+input CheckSuiteAutoTriggerPreference {
+  """
+  The node ID of the application that owns the check suite.
+  """
+  appId: ID!
+
+  """
+  Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository.
+  """
+  setting: Boolean!
+}
+
+"""
+The connection type for CheckSuite.
+"""
+type CheckSuiteConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckSuiteEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckSuite]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckSuiteEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckSuite
+}
+
+"""
+The filters that are available when fetching check suites.
+"""
+input CheckSuiteFilter {
+  """
+  Filters the check suites created by this application ID.
+  """
+  appId: Int
+
+  """
+  Filters the check suites by this name.
+  """
+  checkName: String
+}
+
+"""
+An object which can have its data claimed or claim data from another.
+"""
+union Claimable = Mannequin | User
+
+"""
+Autogenerated input type of ClearLabelsFromLabelable
+"""
+input ClearLabelsFromLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the labelable object to clear the labels from.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of ClearLabelsFromLabelable
+"""
+type ClearLabelsFromLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was unlabeled.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of ClearProjectV2ItemFieldValue
+"""
+input ClearProjectV2ItemFieldValueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to be cleared.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+
+  """
+  The ID of the item to be cleared.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of ClearProjectV2ItemFieldValue
+"""
+type ClearProjectV2ItemFieldValuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated item.
+  """
+  projectV2Item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of CloneProject
+"""
+input CloneProjectInput {
+  """
+  The description of the project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not to clone the source project's workflows.
+  """
+  includeWorkflows: Boolean!
+
+  """
+  The name of the project.
+  """
+  name: String!
+
+  """
+  The visibility of the project, defaults to false (private).
+  """
+  public: Boolean
+
+  """
+  The source project to clone.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The owner ID to create the project under.
+  """
+  targetOwnerId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository", "User"], abstractType: "ProjectOwner")
+}
+
+"""
+Autogenerated return type of CloneProject
+"""
+type CloneProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the JobStatus for populating cloned fields.
+  """
+  jobStatusId: String
+
+  """
+  The new cloned project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of CloneTemplateRepository
+"""
+input CloneTemplateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A short description of the new repository.
+  """
+  description: String
+
+  """
+  Whether to copy all branches from the template to the new repository. Defaults
+  to copying only the default branch of the template.
+  """
+  includeAllBranches: Boolean = false
+
+  """
+  The name of the new repository.
+  """
+  name: String!
+
+  """
+  The ID of the owner for the new repository.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "RepositoryOwner")
+
+  """
+  The Node ID of the template repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Autogenerated return type of CloneTemplateRepository
+"""
+type CloneTemplateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository.
+  """
+  repository: Repository
+}
+
+"""
+An object that can be closed
+"""
+interface Closable {
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+}
+
+"""
+Autogenerated input type of CloseDiscussion
+"""
+input CloseDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the discussion to be closed.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The reason why the discussion is being closed.
+  """
+  reason: DiscussionCloseReason = RESOLVED
+}
+
+"""
+Autogenerated return type of CloseDiscussion
+"""
+type CloseDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was closed.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of CloseIssue
+"""
+input CloseIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to be closed.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The reason the issue is to be closed.
+  """
+  stateReason: IssueClosedStateReason
+}
+
+"""
+Autogenerated return type of CloseIssue
+"""
+type CloseIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was closed.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of ClosePullRequest
+"""
+input ClosePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be closed.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ClosePullRequest
+"""
+type ClosePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was closed.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'closed' event on any `Closable`.
+"""
+type ClosedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Object that was closed.
+  """
+  closable: Closable!
+
+  """
+  Object which triggered the creation of this event.
+  """
+  closer: Closer
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ClosedEvent object
+  """
+  id: ID!
+
+  """
+  The HTTP path for this closed event.
+  """
+  resourcePath: URI!
+
+  """
+  The reason the issue state was changed to closed.
+  """
+  stateReason: IssueStateReason
+
+  """
+  The HTTP URL for this closed event.
+  """
+  url: URI!
+}
+
+"""
+The object which triggered a `ClosedEvent`.
+"""
+union Closer = Commit | PullRequest
+
+"""
+The Code of Conduct for a repository
+"""
+type CodeOfConduct implements Node {
+  """
+  The body of the Code of Conduct
+  """
+  body: String
+
+  """
+  The Node ID of the CodeOfConduct object
+  """
+  id: ID!
+
+  """
+  The key for the Code of Conduct
+  """
+  key: String!
+
+  """
+  The formal name of the Code of Conduct
+  """
+  name: String!
+
+  """
+  The HTTP path for this Code of Conduct
+  """
+  resourcePath: URI
+
+  """
+  The HTTP URL for this Code of Conduct
+  """
+  url: URI
+}
+
+"""
+Collaborators affiliation level with a subject.
+"""
+enum CollaboratorAffiliation {
+  """
+  All collaborators the authenticated user can see.
+  """
+  ALL
+
+  """
+  All collaborators with permissions to an organization-owned subject, regardless of organization membership status.
+  """
+  DIRECT
+
+  """
+  All outside collaborators of an organization-owned subject.
+  """
+  OUTSIDE
+}
+
+"""
+Represents a comment.
+"""
+interface Comment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the Comment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+A comment author association with repository.
+"""
+enum CommentAuthorAssociation {
+  """
+  Author has been invited to collaborate on the repository.
+  """
+  COLLABORATOR
+
+  """
+  Author has previously committed to the repository.
+  """
+  CONTRIBUTOR
+
+  """
+  Author has not previously committed to GitHub.
+  """
+  FIRST_TIMER
+
+  """
+  Author has not previously committed to the repository.
+  """
+  FIRST_TIME_CONTRIBUTOR
+
+  """
+  Author is a placeholder for an unclaimed user.
+  """
+  MANNEQUIN
+
+  """
+  Author is a member of the organization that owns the repository.
+  """
+  MEMBER
+
+  """
+  Author has no association with the repository.
+  """
+  NONE
+
+  """
+  Author is the owner of the repository.
+  """
+  OWNER
+}
+
+"""
+The possible errors that will prevent a user from updating a comment.
+"""
+enum CommentCannotUpdateReason {
+  """
+  Unable to create comment because repository is archived.
+  """
+  ARCHIVED
+
+  """
+  You cannot update this comment
+  """
+  DENIED
+
+  """
+  You must be the author or have write access to this repository to update this comment.
+  """
+  INSUFFICIENT_ACCESS
+
+  """
+  Unable to create comment because issue is locked.
+  """
+  LOCKED
+
+  """
+  You must be logged in to update this comment.
+  """
+  LOGIN_REQUIRED
+
+  """
+  Repository is under maintenance.
+  """
+  MAINTENANCE
+
+  """
+  At least one email address must be verified to update this comment.
+  """
+  VERIFIED_EMAIL_REQUIRED
+}
+
+"""
+Represents a 'comment_deleted' event on a given issue or pull request.
+"""
+type CommentDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The user who authored the deleted comment.
+  """
+  deletedCommentAuthor: Actor
+
+  """
+  The Node ID of the CommentDeletedEvent object
+  """
+  id: ID!
+}
+
+"""
+Represents a Git commit.
+"""
+type Commit implements GitObject & Node & Subscribable & UniformResourceLocatable {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The number of additions in this commit.
+  """
+  additions: Int!
+
+  """
+  The merged Pull Request that introduced the commit to the repository. If the
+  commit is not present in the default branch, additionally returns open Pull
+  Requests associated with the commit
+  """
+  associatedPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests.
+    """
+    orderBy: PullRequestOrder = {field: CREATED_AT, direction: ASC}
+  ): PullRequestConnection
+
+  """
+  Authorship details of the commit.
+  """
+  author: GitActor
+
+  """
+  Check if the committer and the author match.
+  """
+  authoredByCommitter: Boolean!
+
+  """
+  The datetime when this commit was authored.
+  """
+  authoredDate: DateTime!
+
+  """
+  The list of authors for this commit based on the git author and the Co-authored-by
+  message trailer. The git author will always be first.
+  """
+  authors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GitActorConnection!
+
+  """
+  Fetches `git blame` information.
+  """
+  blame(
+    """
+    The file whose Git blame information you want.
+    """
+    path: String!
+  ): Blame!
+
+  """
+  We recommend using the `changedFilesIfAvailable` field instead of
+  `changedFiles`, as `changedFiles` will cause your request to return an error
+  if GitHub is unable to calculate the number of changed files.
+  """
+  changedFiles: Int!
+    @deprecated(
+      reason: "`changedFiles` will be removed. Use `changedFilesIfAvailable` instead. Removal on 2023-01-01 UTC."
+    )
+
+  """
+  The number of changed files in this commit. If GitHub is unable to calculate
+  the number of changed files (for example due to a timeout), this will return
+  `null`. We recommend using this field instead of `changedFiles`.
+  """
+  changedFilesIfAvailable: Int
+
+  """
+  The check suites associated with a commit.
+  """
+  checkSuites(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filters the check suites by this type.
+    """
+    filterBy: CheckSuiteFilter
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckSuiteConnection
+
+  """
+  Comments made on the commit.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The datetime when this commit was committed.
+  """
+  committedDate: DateTime!
+
+  """
+  Check if committed via GitHub web UI.
+  """
+  committedViaWeb: Boolean!
+
+  """
+  Committer details of the commit.
+  """
+  committer: GitActor
+
+  """
+  The number of deletions in this commit.
+  """
+  deletions: Int!
+
+  """
+  The deployments associated with a commit.
+  """
+  deployments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Environments to list deployments for
+    """
+    environments: [String!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for deployments returned from the connection.
+    """
+    orderBy: DeploymentOrder = {field: CREATED_AT, direction: ASC}
+  ): DeploymentConnection
+
+  """
+  The tree entry representing the file located at the given path.
+  """
+  file(
+    """
+    The path for the file
+    """
+    path: String!
+  ): TreeEntry
+
+  """
+  The linear commit history starting from (and including) this commit, in the same order as `git log`.
+  """
+  history(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    If non-null, filters history to only show commits with matching authorship.
+    """
+    author: CommitAuthor
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    If non-null, filters history to only show commits touching files under this path.
+    """
+    path: String
+
+    """
+    Allows specifying a beginning time or date for fetching commits.
+    """
+    since: GitTimestamp
+
+    """
+    Allows specifying an ending time or date for fetching commits.
+    """
+    until: GitTimestamp
+  ): CommitHistoryConnection!
+
+  """
+  The Node ID of the Commit object
+  """
+  id: ID!
+
+  """
+  The Git commit message
+  """
+  message: String!
+
+  """
+  The Git commit message body
+  """
+  messageBody: String!
+
+  """
+  The commit message body rendered to HTML.
+  """
+  messageBodyHTML: HTML!
+
+  """
+  The Git commit message headline
+  """
+  messageHeadline: String!
+
+  """
+  The commit message headline rendered to HTML.
+  """
+  messageHeadlineHTML: HTML!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The organization this commit was made on behalf of.
+  """
+  onBehalfOf: Organization
+
+  """
+  The parents of a commit.
+  """
+  parents(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitConnection!
+
+  """
+  The datetime when this commit was pushed.
+  """
+  pushedDate: DateTime @deprecated(reason: "`pushedDate` is no longer supported. Removal on 2023-07-01 UTC.")
+
+  """
+  The Repository this commit belongs to
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this commit
+  """
+  resourcePath: URI!
+
+  """
+  Commit signing information, if present.
+  """
+  signature: GitSignature
+
+  """
+  Status information for this commit
+  """
+  status: Status
+
+  """
+  Check and Status rollup information for this commit.
+  """
+  statusCheckRollup: StatusCheckRollup
+
+  """
+  Returns a list of all submodules in this repository as of this Commit parsed from the .gitmodules file.
+  """
+  submodules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SubmoduleConnection!
+
+  """
+  Returns a URL to download a tarball archive for a repository.
+  Note: For private repositories, these links are temporary and expire after five minutes.
+  """
+  tarballUrl: URI!
+
+  """
+  Commit's root Tree
+  """
+  tree: Tree!
+
+  """
+  The HTTP path for the tree of this commit
+  """
+  treeResourcePath: URI!
+
+  """
+  The HTTP URL for the tree of this commit
+  """
+  treeUrl: URI!
+
+  """
+  The HTTP URL for this commit
+  """
+  url: URI!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Returns a URL to download a zipball archive for a repository.
+  Note: For private repositories, these links are temporary and expire after five minutes.
+  """
+  zipballUrl: URI!
+}
+
+"""
+Specifies an author for filtering Git commits.
+"""
+input CommitAuthor {
+  """
+  Email addresses to filter by. Commits authored by any of the specified email addresses will be returned.
+  """
+  emails: [String!]
+
+  """
+  ID of a User to filter by. If non-null, only commits authored by this user
+  will be returned. This field takes precedence over emails.
+  """
+  id: ID
+}
+
+"""
+Parameters to be used for the commit_author_email_pattern rule
+"""
+type CommitAuthorEmailPatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the commit_author_email_pattern rule
+"""
+input CommitAuthorEmailPatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Represents a comment on a given Commit.
+"""
+type CommitComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the comment body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the commit associated with the comment, if the commit exists.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the CommitComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies the file path associated with the comment.
+  """
+  path: String
+
+  """
+  Identifies the line position associated with the comment.
+  """
+  position: Int
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this commit comment.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this commit comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for CommitComment.
+"""
+type CommitCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CommitComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CommitCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CommitComment
+}
+
+"""
+A thread of comments on a commit.
+"""
+type CommitCommentThread implements Node & RepositoryNode {
+  """
+  The comments that exist in this thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The commit the comments were made on.
+  """
+  commit: Commit
+
+  """
+  The Node ID of the CommitCommentThread object
+  """
+  id: ID!
+
+  """
+  The file the comments were made on.
+  """
+  path: String
+
+  """
+  The position in the diff for the commit that the comment was made on.
+  """
+  position: Int
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for Commit.
+"""
+type CommitConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Ordering options for commit contribution connections.
+"""
+input CommitContributionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order commit contributions.
+  """
+  field: CommitContributionOrderField!
+}
+
+"""
+Properties by which commit contribution connections can be ordered.
+"""
+enum CommitContributionOrderField {
+  """
+  Order commit contributions by how many commits they represent.
+  """
+  COMMIT_COUNT
+
+  """
+  Order commit contributions by when they were made.
+  """
+  OCCURRED_AT
+}
+
+"""
+This aggregates commits made by a user within one repository.
+"""
+type CommitContributionsByRepository {
+  """
+  The commit contributions, each representing a day.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for commit contributions returned from the connection.
+    """
+    orderBy: CommitContributionOrder = {field: OCCURRED_AT, direction: DESC}
+  ): CreatedCommitContributionConnection!
+
+  """
+  The repository in which the commits were made.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for the user's commits to the repository in this time range.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for the user's commits to the repository in this time range.
+  """
+  url: URI!
+}
+
+"""
+An edge in a connection.
+"""
+type CommitEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Commit
+}
+
+"""
+The connection type for Commit.
+"""
+type CommitHistoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A message to include with a new commit
+"""
+input CommitMessage {
+  """
+  The body of the message.
+  """
+  body: String
+
+  """
+  The headline of the message.
+  """
+  headline: String!
+}
+
+"""
+Parameters to be used for the commit_message_pattern rule
+"""
+type CommitMessagePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the commit_message_pattern rule
+"""
+input CommitMessagePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A git ref for a commit to be appended to.
+
+The ref must be a branch, i.e. its fully qualified name must start
+with `refs/heads/` (although the input is not required to be fully
+qualified).
+
+The Ref may be specified by its global node ID or by the
+`repositoryNameWithOwner` and `branchName`.
+
+### Examples
+
+Specify a branch using a global node ID:
+
+    { "id": "MDM6UmVmMTpyZWZzL2hlYWRzL21haW4=" }
+
+Specify a branch using `repositoryNameWithOwner` and `branchName`:
+
+    {
+      "repositoryNameWithOwner": "github/graphql-client",
+      "branchName": "main"
+    }
+"""
+input CommittableBranch {
+  """
+  The unqualified name of the branch to append the commit to.
+  """
+  branchName: String
+
+  """
+  The Node ID of the Ref to be updated.
+  """
+  id: ID
+
+  """
+  The nameWithOwner of the repository to commit to.
+  """
+  repositoryNameWithOwner: String
+}
+
+"""
+Parameters to be used for the committer_email_pattern rule
+"""
+type CommitterEmailPatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the committer_email_pattern rule
+"""
+input CommitterEmailPatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Represents a comparison between two commit revisions.
+"""
+type Comparison implements Node {
+  """
+  The number of commits ahead of the base branch.
+  """
+  aheadBy: Int!
+
+  """
+  The base revision of this comparison.
+  """
+  baseTarget: GitObject!
+
+  """
+  The number of commits behind the base branch.
+  """
+  behindBy: Int!
+
+  """
+  The commits which compose this comparison.
+  """
+  commits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ComparisonCommitConnection!
+
+  """
+  The head revision of this comparison.
+  """
+  headTarget: GitObject!
+
+  """
+  The Node ID of the Comparison object
+  """
+  id: ID!
+
+  """
+  The status of this comparison.
+  """
+  status: ComparisonStatus!
+}
+
+"""
+The connection type for Commit.
+"""
+type ComparisonCommitConnection {
+  """
+  The total count of authors and co-authors across all commits.
+  """
+  authorCount: Int!
+
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The status of a git comparison between two refs.
+"""
+enum ComparisonStatus {
+  """
+  The head ref is ahead of the base ref.
+  """
+  AHEAD
+
+  """
+  The head ref is behind the base ref.
+  """
+  BEHIND
+
+  """
+  The head ref is both ahead and behind of the base ref, indicating git history has diverged.
+  """
+  DIVERGED
+
+  """
+  The head ref and base ref are identical.
+  """
+  IDENTICAL
+}
+
+"""
+Represents a 'connected' event on a given issue or pull request.
+"""
+type ConnectedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ConnectedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Issue or pull request that made the reference.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request which was connected.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+The Contributing Guidelines for a repository.
+"""
+type ContributingGuidelines {
+  """
+  The body of the Contributing Guidelines.
+  """
+  body: String
+
+  """
+  The HTTP path for the Contributing Guidelines.
+  """
+  resourcePath: URI
+
+  """
+  The HTTP URL for the Contributing Guidelines.
+  """
+  url: URI
+}
+
+"""
+Represents a contribution a user made on GitHub, such as opening an issue.
+"""
+interface Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+A calendar of contributions made on GitHub by a user.
+"""
+type ContributionCalendar {
+  """
+  A list of hex color codes used in this calendar. The darker the color, the more contributions it represents.
+  """
+  colors: [String!]!
+
+  """
+  Determine if the color set was chosen because it's currently Halloween.
+  """
+  isHalloween: Boolean!
+
+  """
+  A list of the months of contributions in this calendar.
+  """
+  months: [ContributionCalendarMonth!]!
+
+  """
+  The count of total contributions in the calendar.
+  """
+  totalContributions: Int!
+
+  """
+  A list of the weeks of contributions in this calendar.
+  """
+  weeks: [ContributionCalendarWeek!]!
+}
+
+"""
+Represents a single day of contributions on GitHub by a user.
+"""
+type ContributionCalendarDay {
+  """
+  The hex color code that represents how many contributions were made on this day compared to others in the calendar.
+  """
+  color: String!
+
+  """
+  How many contributions were made by the user on this day.
+  """
+  contributionCount: Int!
+
+  """
+  Indication of contributions, relative to other days. Can be used to indicate
+  which color to represent this day on a calendar.
+  """
+  contributionLevel: ContributionLevel!
+
+  """
+  The day this square represents.
+  """
+  date: Date!
+
+  """
+  A number representing which day of the week this square represents, e.g., 1 is Monday.
+  """
+  weekday: Int!
+}
+
+"""
+A month of contributions in a user's contribution graph.
+"""
+type ContributionCalendarMonth {
+  """
+  The date of the first day of this month.
+  """
+  firstDay: Date!
+
+  """
+  The name of the month.
+  """
+  name: String!
+
+  """
+  How many weeks started in this month.
+  """
+  totalWeeks: Int!
+
+  """
+  The year the month occurred in.
+  """
+  year: Int!
+}
+
+"""
+A week of contributions in a user's contribution graph.
+"""
+type ContributionCalendarWeek {
+  """
+  The days of contributions in this week.
+  """
+  contributionDays: [ContributionCalendarDay!]!
+
+  """
+  The date of the earliest square in this week.
+  """
+  firstDay: Date!
+}
+
+"""
+Varying levels of contributions from none to many.
+"""
+enum ContributionLevel {
+  """
+  Lowest 25% of days of contributions.
+  """
+  FIRST_QUARTILE
+
+  """
+  Highest 25% of days of contributions. More contributions than the third quartile.
+  """
+  FOURTH_QUARTILE
+
+  """
+  No contributions occurred.
+  """
+  NONE
+
+  """
+  Second lowest 25% of days of contributions. More contributions than the first quartile.
+  """
+  SECOND_QUARTILE
+
+  """
+  Second highest 25% of days of contributions. More contributions than second quartile, less than the fourth quartile.
+  """
+  THIRD_QUARTILE
+}
+
+"""
+Ordering options for contribution connections.
+"""
+input ContributionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+}
+
+"""
+A contributions collection aggregates contributions such as opened issues and commits created by a user.
+"""
+type ContributionsCollection {
+  """
+  Commit contributions made by the user, grouped by repository.
+  """
+  commitContributionsByRepository(
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [CommitContributionsByRepository!]!
+
+  """
+  A calendar of this user's contributions on GitHub.
+  """
+  contributionCalendar: ContributionCalendar!
+
+  """
+  The years the user has been making contributions with the most recent year first.
+  """
+  contributionYears: [Int!]!
+
+  """
+  Determine if this collection's time span ends in the current month.
+  """
+  doesEndInCurrentMonth: Boolean!
+
+  """
+  The date of the first restricted contribution the user made in this time
+  period. Can only be non-null when the user has enabled private contribution counts.
+  """
+  earliestRestrictedContributionDate: Date
+
+  """
+  The ending date and time of this collection.
+  """
+  endedAt: DateTime!
+
+  """
+  The first issue the user opened on GitHub. This will be null if that issue was
+  opened outside the collection's time range and ignoreTimeRange is false. If
+  the issue is not visible but the user has opted to show private contributions,
+  a RestrictedContribution will be returned.
+  """
+  firstIssueContribution: CreatedIssueOrRestrictedContribution
+
+  """
+  The first pull request the user opened on GitHub. This will be null if that
+  pull request was opened outside the collection's time range and
+  ignoreTimeRange is not true. If the pull request is not visible but the user
+  has opted to show private contributions, a RestrictedContribution will be returned.
+  """
+  firstPullRequestContribution: CreatedPullRequestOrRestrictedContribution
+
+  """
+  The first repository the user created on GitHub. This will be null if that
+  first repository was created outside the collection's time range and
+  ignoreTimeRange is false. If the repository is not visible, then a
+  RestrictedContribution is returned.
+  """
+  firstRepositoryContribution: CreatedRepositoryOrRestrictedContribution
+
+  """
+  Does the user have any more activity in the timeline that occurred prior to the collection's time range?
+  """
+  hasActivityInThePast: Boolean!
+
+  """
+  Determine if there are any contributions in this collection.
+  """
+  hasAnyContributions: Boolean!
+
+  """
+  Determine if the user made any contributions in this time frame whose details
+  are not visible because they were made in a private repository. Can only be
+  true if the user enabled private contribution counts.
+  """
+  hasAnyRestrictedContributions: Boolean!
+
+  """
+  Whether or not the collector's time span is all within the same day.
+  """
+  isSingleDay: Boolean!
+
+  """
+  A list of issues the user opened.
+  """
+  issueContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first issue ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedIssueContributionConnection!
+
+  """
+  Issue contributions made by the user, grouped by repository.
+  """
+  issueContributionsByRepository(
+    """
+    Should the user's first issue ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [IssueContributionsByRepository!]!
+
+  """
+  When the user signed up for GitHub. This will be null if that sign up date
+  falls outside the collection's time range and ignoreTimeRange is false.
+  """
+  joinedGitHubContribution: JoinedGitHubContribution
+
+  """
+  The date of the most recent restricted contribution the user made in this time
+  period. Can only be non-null when the user has enabled private contribution counts.
+  """
+  latestRestrictedContributionDate: Date
+
+  """
+  When this collection's time range does not include any activity from the user, use this
+  to get a different collection from an earlier time range that does have activity.
+  """
+  mostRecentCollectionWithActivity: ContributionsCollection
+
+  """
+  Returns a different contributions collection from an earlier time range than this one
+  that does not have any contributions.
+  """
+  mostRecentCollectionWithoutActivity: ContributionsCollection
+
+  """
+  The issue the user opened on GitHub that received the most comments in the specified
+  time frame.
+  """
+  popularIssueContribution: CreatedIssueContribution
+
+  """
+  The pull request the user opened on GitHub that received the most comments in the
+  specified time frame.
+  """
+  popularPullRequestContribution: CreatedPullRequestContribution
+
+  """
+  Pull request contributions made by the user.
+  """
+  pullRequestContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first pull request ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestContributionConnection!
+
+  """
+  Pull request contributions made by the user, grouped by repository.
+  """
+  pullRequestContributionsByRepository(
+    """
+    Should the user's first pull request ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [PullRequestContributionsByRepository!]!
+
+  """
+  Pull request review contributions made by the user. Returns the most recently
+  submitted review for each PR reviewed by the user.
+  """
+  pullRequestReviewContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestReviewContributionConnection!
+
+  """
+  Pull request review contributions made by the user, grouped by repository.
+  """
+  pullRequestReviewContributionsByRepository(
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [PullRequestReviewContributionsByRepository!]!
+
+  """
+  A list of repositories owned by the user that the user created in this time range.
+  """
+  repositoryContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first repository ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedRepositoryContributionConnection!
+
+  """
+  A count of contributions made by the user that the viewer cannot access. Only
+  non-zero when the user has chosen to share their private contribution counts.
+  """
+  restrictedContributionsCount: Int!
+
+  """
+  The beginning date and time of this collection.
+  """
+  startedAt: DateTime!
+
+  """
+  How many commits were made by the user in this time span.
+  """
+  totalCommitContributions: Int!
+
+  """
+  How many issues the user opened.
+  """
+  totalIssueContributions(
+    """
+    Should the user's first issue ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many pull requests the user opened.
+  """
+  totalPullRequestContributions(
+    """
+    Should the user's first pull request ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many pull request reviews the user left.
+  """
+  totalPullRequestReviewContributions: Int!
+
+  """
+  How many different repositories the user committed to.
+  """
+  totalRepositoriesWithContributedCommits: Int!
+
+  """
+  How many different repositories the user opened issues in.
+  """
+  totalRepositoriesWithContributedIssues(
+    """
+    Should the user's first issue ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many different repositories the user left pull request reviews in.
+  """
+  totalRepositoriesWithContributedPullRequestReviews: Int!
+
+  """
+  How many different repositories the user opened pull requests in.
+  """
+  totalRepositoriesWithContributedPullRequests(
+    """
+    Should the user's first pull request ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many repositories the user created.
+  """
+  totalRepositoryContributions(
+    """
+    Should the user's first repository ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+  ): Int!
+
+  """
+  The user who made the contributions in this collection.
+  """
+  user: User!
+}
+
+"""
+Autogenerated input type of ConvertProjectCardNoteToIssue
+"""
+input ConvertProjectCardNoteToIssueInput {
+  """
+  The body of the newly created issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ProjectCard ID to convert.
+  """
+  projectCardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  The ID of the repository to create the issue in.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the newly created issue. Defaults to the card's note text.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of ConvertProjectCardNoteToIssue
+"""
+type ConvertProjectCardNoteToIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated ProjectCard.
+  """
+  projectCard: ProjectCard
+}
+
+"""
+Autogenerated input type of ConvertPullRequestToDraft
+"""
+input ConvertPullRequestToDraftInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to convert to draft
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ConvertPullRequestToDraft
+"""
+type ConvertPullRequestToDraftPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that is now a draft.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'convert_to_draft' event on a given pull request.
+"""
+type ConvertToDraftEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ConvertToDraftEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this convert to draft event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this convert to draft event.
+  """
+  url: URI!
+}
+
+"""
+Represents a 'converted_note_to_issue' event on a given issue or pull request.
+"""
+type ConvertedNoteToIssueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ConvertedNoteToIssueEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+Represents a 'converted_to_discussion' event on a given issue.
+"""
+type ConvertedToDiscussionEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The discussion that the issue was converted into.
+  """
+  discussion: Discussion
+
+  """
+  The Node ID of the ConvertedToDiscussionEvent object
+  """
+  id: ID!
+}
+
+"""
+Autogenerated input type of CopyProjectV2
+"""
+input CopyProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Include draft issues in the new project
+  """
+  includeDraftIssues: Boolean = false
+
+  """
+  The owner ID of the new project.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "OrganizationOrUser")
+
+  """
+  The ID of the source Project to copy.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The title of the project.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CopyProjectV2
+"""
+type CopyProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The copied project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of CreateAttributionInvitation
+"""
+input CreateAttributionInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the owner scoping the reattributable data.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+
+  """
+  The Node ID of the account owning the data to reattribute.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+
+  """
+  The Node ID of the account which may claim the data.
+  """
+  targetId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+}
+
+"""
+Autogenerated return type of CreateAttributionInvitation
+"""
+type CreateAttributionInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner scoping the reattributable data.
+  """
+  owner: Organization
+
+  """
+  The account owning the data to reattribute.
+  """
+  source: Claimable
+
+  """
+  The account which may claim the data.
+  """
+  target: Claimable
+}
+
+"""
+Autogenerated input type of CreateBranchProtectionRule
+"""
+input CreateBranchProtectionRuleInput {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to bypass force push targeting matching branches.
+  """
+  bypassForcePushActorIds: [ID!]
+
+  """
+  A list of User, Team, or App IDs allowed to bypass pull requests targeting matching branches.
+  """
+  bypassPullRequestActorIds: [ID!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean
+
+  """
+  The glob-like pattern used to determine matching branches.
+  """
+  pattern: String!
+
+  """
+  A list of User, Team, or App IDs allowed to push to matching branches.
+  """
+  pushActorIds: [ID!]
+
+  """
+  The global relay id of the repository in which a new branch protection rule should be created in.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  The list of required deployment environments
+  """
+  requiredDeploymentEnvironments: [String!]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String!]
+
+  """
+  The list of required status checks
+  """
+  requiredStatusChecks: [RequiredStatusCheckInput!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean
+
+  """
+  Are successful deployments required before merging.
+  """
+  requiresDeployments: Boolean
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to dismiss reviews on pull requests targeting matching branches.
+  """
+  reviewDismissalActorIds: [ID!]
+}
+
+"""
+Autogenerated return type of CreateBranchProtectionRule
+"""
+type CreateBranchProtectionRulePayload {
+  """
+  The newly created BranchProtectionRule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of CreateCheckRun
+"""
+input CreateCheckRunInput {
+  """
+  Possible further actions the integrator can perform, which a user may trigger.
+  """
+  actions: [CheckRunAction!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The time that the check run finished.
+  """
+  completedAt: DateTime
+
+  """
+  The final conclusion of the check.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  The URL of the integrator's site that has the full details of the check.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The SHA of the head commit.
+  """
+  headSha: GitObjectID!
+
+  """
+  The name of the check.
+  """
+  name: String!
+
+  """
+  Descriptive details about the run.
+  """
+  output: CheckRunOutput
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The time that the check run began.
+  """
+  startedAt: DateTime
+
+  """
+  The current status.
+  """
+  status: RequestableCheckStatusState
+}
+
+"""
+Autogenerated return type of CreateCheckRun
+"""
+type CreateCheckRunPayload {
+  """
+  The newly created check run.
+  """
+  checkRun: CheckRun
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of CreateCheckSuite
+"""
+input CreateCheckSuiteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The SHA of the head commit.
+  """
+  headSha: GitObjectID!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateCheckSuite
+"""
+type CreateCheckSuitePayload {
+  """
+  The newly created check suite.
+  """
+  checkSuite: CheckSuite
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of CreateCommitOnBranch
+"""
+input CreateCommitOnBranchInput {
+  """
+  The Ref to be updated.  Must be a branch.
+  """
+  branch: CommittableBranch!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The git commit oid expected at the head of the branch prior to the commit
+  """
+  expectedHeadOid: GitObjectID!
+
+  """
+  A description of changes to files in this commit.
+  """
+  fileChanges: FileChanges
+
+  """
+  The commit message the be included with the commit.
+  """
+  message: CommitMessage!
+}
+
+"""
+Autogenerated return type of CreateCommitOnBranch
+"""
+type CreateCommitOnBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new commit.
+  """
+  commit: Commit
+
+  """
+  The ref which has been updated to point to the new commit.
+  """
+  ref: Ref
+}
+
+"""
+Autogenerated input type of CreateDeployment
+"""
+input CreateDeploymentInput @preview(toggledBy: "flash-preview") {
+  """
+  Attempt to automatically merge the default branch into the requested ref, defaults to true.
+  """
+  autoMerge: Boolean = true
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Short description of the deployment.
+  """
+  description: String = ""
+
+  """
+  Name for the target deployment environment.
+  """
+  environment: String = "production"
+
+  """
+  JSON payload with extra information about the deployment.
+  """
+  payload: String = "{}"
+
+  """
+  The node ID of the ref to be deployed.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The status contexts to verify against commit status checks. To bypass required
+  contexts, pass an empty array. Defaults to all unique contexts.
+  """
+  requiredContexts: [String!]
+
+  """
+  Specifies a task to execute.
+  """
+  task: String = "deploy"
+}
+
+"""
+Autogenerated return type of CreateDeployment
+"""
+type CreateDeploymentPayload @preview(toggledBy: "flash-preview") {
+  """
+  True if the default branch has been auto-merged into the deployment ref.
+  """
+  autoMerged: Boolean
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new deployment.
+  """
+  deployment: Deployment
+}
+
+"""
+Autogenerated input type of CreateDeploymentStatus
+"""
+input CreateDeploymentStatusInput @preview(toggledBy: "flash-preview") {
+  """
+  Adds a new inactive status to all non-transient, non-production environment
+  deployments with the same repository and environment name as the created
+  status's deployment.
+  """
+  autoInactive: Boolean = true
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The node ID of the deployment.
+  """
+  deploymentId: ID! @possibleTypes(concreteTypes: ["Deployment"])
+
+  """
+  A short description of the status. Maximum length of 140 characters.
+  """
+  description: String = ""
+
+  """
+  If provided, updates the environment of the deploy. Otherwise, does not modify the environment.
+  """
+  environment: String
+
+  """
+  Sets the URL for accessing your environment.
+  """
+  environmentUrl: String = ""
+
+  """
+  The log URL to associate with this status.       This URL should contain
+  output to keep the user updated while the task is running       or serve as
+  historical information for what happened in the deployment.
+  """
+  logUrl: String = ""
+
+  """
+  The state of the deployment.
+  """
+  state: DeploymentStatusState!
+}
+
+"""
+Autogenerated return type of CreateDeploymentStatus
+"""
+type CreateDeploymentStatusPayload @preview(toggledBy: "flash-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new deployment status.
+  """
+  deploymentStatus: DeploymentStatus
+}
+
+"""
+Autogenerated input type of CreateDiscussion
+"""
+input CreateDiscussionInput {
+  """
+  The body of the discussion.
+  """
+  body: String!
+
+  """
+  The id of the discussion category to associate with this discussion.
+  """
+  categoryId: ID! @possibleTypes(concreteTypes: ["DiscussionCategory"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the repository on which to create the discussion.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the discussion.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateDiscussion
+"""
+type CreateDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was just created.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of CreateEnterpriseOrganization
+"""
+input CreateEnterpriseOrganizationInput {
+  """
+  The logins for the administrators of the new organization.
+  """
+  adminLogins: [String!]!
+
+  """
+  The email used for sending billing receipts.
+  """
+  billingEmail: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise owning the new organization.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the new organization.
+  """
+  login: String!
+
+  """
+  The profile name of the new organization.
+  """
+  profileName: String!
+}
+
+"""
+Autogenerated return type of CreateEnterpriseOrganization
+"""
+type CreateEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise that owns the created organization.
+  """
+  enterprise: Enterprise
+
+  """
+  The organization that was created.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of CreateEnvironment
+"""
+input CreateEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the environment.
+  """
+  name: String!
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateEnvironment
+"""
+type CreateEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new or existing environment.
+  """
+  environment: Environment
+}
+
+"""
+Autogenerated input type of CreateIpAllowListEntry
+"""
+input CreateIpAllowListEntryInput {
+  """
+  An IP address or range of addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether the IP allow list entry is active when an IP allow list is enabled.
+  """
+  isActive: Boolean!
+
+  """
+  An optional name for the IP allow list entry.
+  """
+  name: String
+
+  """
+  The ID of the owner for which to create the new IP allow list entry.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+}
+
+"""
+Autogenerated return type of CreateIpAllowListEntry
+"""
+type CreateIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was created.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
+
+"""
+Autogenerated input type of CreateIssue
+"""
+input CreateIssueInput {
+  """
+  The Node ID for the user assignee for this issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body for the issue description.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of an issue template in the repository, assigns labels and assignees from the template to the issue
+  """
+  issueTemplate: String
+
+  """
+  An array of Node IDs of labels for this issue.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The Node ID of the milestone for this issue.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this issue.
+  """
+  projectIds: [ID!] @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title for the issue.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateIssue
+"""
+type CreateIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new issue.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of CreateLabel
+"""
+input CreateLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A 6 character hex code, without the leading #, identifying the color of the label.
+  """
+  color: String!
+
+  """
+  A brief description of the label, such as its purpose.
+  """
+  description: String
+
+  """
+  The name of the label.
+  """
+  name: String!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateLabel
+"""
+type CreateLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new label.
+  """
+  label: Label
+}
+
+"""
+Autogenerated input type of CreateLinkedBranch
+"""
+input CreateLinkedBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to link to.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The name of the new branch. Defaults to issue number and title.
+  """
+  name: String
+
+  """
+  The commit SHA to base the new branch on.
+  """
+  oid: GitObjectID!
+
+  """
+  ID of the repository to create the branch in. Defaults to the issue repository.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateLinkedBranch
+"""
+type CreateLinkedBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was linked to.
+  """
+  issue: Issue
+
+  """
+  The new branch issue reference.
+  """
+  linkedBranch: LinkedBranch
+}
+
+"""
+Autogenerated input type of CreateMigrationSource
+"""
+input CreateMigrationSourceInput {
+  """
+  The migration source access token.
+  """
+  accessToken: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The GitHub personal access token of the user importing to the target repository.
+  """
+  githubPat: String
+
+  """
+  The migration source name.
+  """
+  name: String!
+
+  """
+  The ID of the organization that will own the migration source.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The migration source type.
+  """
+  type: MigrationSourceType!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  url: String
+}
+
+"""
+Autogenerated return type of CreateMigrationSource
+"""
+type CreateMigrationSourcePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The created migration source.
+  """
+  migrationSource: MigrationSource
+}
+
+"""
+Autogenerated input type of CreateProject
+"""
+input CreateProjectInput {
+  """
+  The description of project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of project.
+  """
+  name: String!
+
+  """
+  The owner ID to create the project under.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository", "User"], abstractType: "ProjectOwner")
+
+  """
+  A list of repository IDs to create as linked repositories for the project
+  """
+  repositoryIds: [ID!] @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The name of the GitHub-provided template.
+  """
+  template: ProjectTemplate
+}
+
+"""
+Autogenerated return type of CreateProject
+"""
+type CreateProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of CreateProjectV2Field
+"""
+input CreateProjectV2FieldInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The data type of the field.
+  """
+  dataType: ProjectV2CustomFieldType!
+
+  """
+  The name of the field.
+  """
+  name: String!
+
+  """
+  The ID of the Project to create the field in.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  Options for a single select field. At least one value is required if data_type is SINGLE_SELECT
+  """
+  singleSelectOptions: [ProjectV2SingleSelectFieldOptionInput!]
+}
+
+"""
+Autogenerated return type of CreateProjectV2Field
+"""
+type CreateProjectV2FieldPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new field.
+  """
+  projectV2Field: ProjectV2FieldConfiguration
+}
+
+"""
+Autogenerated input type of CreateProjectV2
+"""
+input CreateProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner ID to create the project under.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "OrganizationOrUser")
+
+  """
+  The repository to link the project to.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The team to link the project to. The team will be granted read permissions.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The title of the project.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateProjectV2
+"""
+type CreateProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of CreatePullRequest
+"""
+input CreatePullRequestInput {
+  """
+  The name of the branch you want your changes pulled into. This should be an existing branch
+  on the current repository. You cannot update the base branch on a pull request to point
+  to another repository.
+  """
+  baseRefName: String!
+
+  """
+  The contents of the pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Indicates whether this pull request should be a draft.
+  """
+  draft: Boolean = false
+
+  """
+  The name of the branch where your changes are implemented. For cross-repository pull requests
+  in the same network, namespace `head_ref_name` with a user like this: `username:branch`.
+  """
+  headRefName: String!
+
+  """
+  The Node ID of the head repository.
+  """
+  headRepositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean = true
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the pull request.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreatePullRequest
+"""
+type CreatePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of CreateRef
+"""
+input CreateRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The fully qualified name of the new Ref (ie: `refs/heads/my_new_branch`).
+  """
+  name: String!
+
+  """
+  The GitObjectID that the new Ref shall target. Must point to a commit.
+  """
+  oid: GitObjectID!
+
+  """
+  The Node ID of the Repository to create the Ref in.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateRef
+"""
+type CreateRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created ref.
+  """
+  ref: Ref
+}
+
+"""
+Autogenerated input type of CreateRepository
+"""
+input CreateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A short description of the new repository.
+  """
+  description: String
+
+  """
+  Indicates if the repository should have the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean = true
+
+  """
+  Indicates if the repository should have the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean = false
+
+  """
+  The URL for a web page about this repository.
+  """
+  homepageUrl: URI
+
+  """
+  The name of the new repository.
+  """
+  name: String!
+
+  """
+  The ID of the owner for the new repository.
+  """
+  ownerId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "RepositoryOwner")
+
+  """
+  When an organization is specified as the owner, this ID identifies the team
+  that should be granted access to the new repository.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Whether this repository should be marked as a template such that anyone who
+  can access it can create new repositories with the same files and directory structure.
+  """
+  template: Boolean = false
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Autogenerated return type of CreateRepository
+"""
+type CreateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of CreateRepositoryRuleset
+"""
+input CreateRepositoryRulesetInput {
+  """
+  A list of actors that are allowed to bypass rules in this ruleset.
+  """
+  bypassActors: [RepositoryRulesetBypassActorInput!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The set of conditions for this ruleset
+  """
+  conditions: RepositoryRuleConditionsInput!
+
+  """
+  The enforcement level for this ruleset
+  """
+  enforcement: RuleEnforcement!
+
+  """
+  The name of the ruleset.
+  """
+  name: String!
+
+  """
+  The list of rules for this ruleset
+  """
+  rules: [RepositoryRuleInput!]
+
+  """
+  The global relay id of the source in which a new ruleset should be created in.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository"], abstractType: "RuleSource")
+
+  """
+  The target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+}
+
+"""
+Autogenerated return type of CreateRepositoryRuleset
+"""
+type CreateRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created Ruleset.
+  """
+  ruleset: RepositoryRuleset
+}
+
+"""
+Autogenerated input type of CreateSponsorsListing
+"""
+input CreateSponsorsListingInput {
+  """
+  The country or region where the sponsorable's bank account is located.
+  Required if fiscalHostLogin is not specified, ignored when fiscalHostLogin is specified.
+  """
+  billingCountryOrRegionCode: SponsorsCountryOrRegionCode
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The email address we should use to contact you about the GitHub Sponsors
+  profile being created. This will not be shared publicly. Must be a verified
+  email address already on your GitHub account. Only relevant when the
+  sponsorable is yourself. Defaults to your primary email address on file if omitted.
+  """
+  contactEmail: String
+
+  """
+  The username of the supported fiscal host's GitHub organization, if you want
+  to receive sponsorship payouts through a fiscal host rather than directly to a
+  bank account. For example, 'Open-Source-Collective' for Open Source Collective
+  or 'numfocus' for numFOCUS. Case insensitive. See https://docs.github.com/sponsors/receiving-sponsorships-through-github-sponsors/using-a-fiscal-host-to-receive-github-sponsors-payouts
+  for more information.
+  """
+  fiscalHostLogin: String
+
+  """
+  The URL for your profile page on the fiscal host's website, e.g.,
+  https://opencollective.com/babel or https://numfocus.org/project/bokeh.
+  Required if fiscalHostLogin is specified.
+  """
+  fiscallyHostedProjectProfileUrl: String
+
+  """
+  Provide an introduction to serve as the main focus that appears on your GitHub
+  Sponsors profile. It's a great opportunity to help potential sponsors learn
+  more about you, your work, and why their sponsorship is important to you.
+  GitHub-flavored Markdown is supported.
+  """
+  fullDescription: String
+
+  """
+  The country or region where the sponsorable resides. This is for tax purposes.
+  Required if the sponsorable is yourself, ignored when sponsorableLogin
+  specifies an organization.
+  """
+  residenceCountryOrRegionCode: SponsorsCountryOrRegionCode
+
+  """
+  The username of the organization to create a GitHub Sponsors profile for, if
+  desired. Defaults to creating a GitHub Sponsors profile for the authenticated
+  user if omitted.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of CreateSponsorsListing
+"""
+type CreateSponsorsListingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new GitHub Sponsors profile.
+  """
+  sponsorsListing: SponsorsListing
+}
+
+"""
+Autogenerated input type of CreateSponsorsTier
+"""
+input CreateSponsorsTierInput {
+  """
+  The value of the new tier in US dollars. Valid values: 1-12000.
+  """
+  amount: Int!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A description of what this tier is, what perks sponsors might receive, what a sponsorship at this tier means for you, etc.
+  """
+  description: String!
+
+  """
+  Whether sponsorships using this tier should happen monthly/yearly or just once.
+  """
+  isRecurring: Boolean = true
+
+  """
+  Whether to make the tier available immediately for sponsors to choose.
+  Defaults to creating a draft tier that will not be publicly visible.
+  """
+  publish: Boolean = false
+
+  """
+  Optional ID of the private repository that sponsors at this tier should gain
+  read-only access to. Must be owned by an organization.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Optional name of the private repository that sponsors at this tier should gain
+  read-only access to. Must be owned by an organization. Necessary if
+  repositoryOwnerLogin is given. Will be ignored if repositoryId is given.
+  """
+  repositoryName: String
+
+  """
+  Optional login of the organization owner of the private repository that
+  sponsors at this tier should gain read-only access to. Necessary if
+  repositoryName is given. Will be ignored if repositoryId is given.
+  """
+  repositoryOwnerLogin: String
+
+  """
+  The ID of the user or organization who owns the GitHub Sponsors profile.
+  Defaults to the current user if omitted and sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who owns the GitHub Sponsors profile.
+  Defaults to the current user if omitted and sponsorableId is not given.
+  """
+  sponsorableLogin: String
+
+  """
+  Optional message new sponsors at this tier will receive.
+  """
+  welcomeMessage: String
+}
+
+"""
+Autogenerated return type of CreateSponsorsTier
+"""
+type CreateSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new tier.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+Autogenerated input type of CreateSponsorship
+"""
+input CreateSponsorshipInput {
+  """
+  The amount to pay to the sponsorable in US dollars. Required if a tierId is not specified. Valid values: 1-12000.
+  """
+  amount: Int
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether the sponsorship should happen monthly/yearly or just this one time. Required if a tierId is not specified.
+  """
+  isRecurring: Boolean
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorable. Public visibility still does not reveal which tier is used.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorable.
+  """
+  receiveEmails: Boolean = true
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+
+  """
+  The ID of one of sponsorable's existing tiers to sponsor at. Required if amount is not specified.
+  """
+  tierId: ID @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of CreateSponsorship
+"""
+type CreateSponsorshipPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The sponsorship that was started.
+  """
+  sponsorship: Sponsorship
+}
+
+"""
+Autogenerated input type of CreateSponsorships
+"""
+input CreateSponsorshipsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorables. Public visibility still does not reveal the dollar value of
+  the sponsorship.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorables.
+  """
+  receiveEmails: Boolean = false
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying for the sponsorships.
+  """
+  sponsorLogin: String!
+
+  """
+  The list of maintainers to sponsor and for how much apiece.
+  """
+  sponsorships: [BulkSponsorship!]!
+}
+
+"""
+Autogenerated return type of CreateSponsorships
+"""
+type CreateSponsorshipsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The users and organizations who received a sponsorship.
+  """
+  sponsorables: [Sponsorable!]
+}
+
+"""
+Autogenerated input type of CreateTeamDiscussionComment
+"""
+input CreateTeamDiscussionCommentInput {
+  """
+  The content of the comment. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `body` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the discussion to which the comment belongs. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `discussionId` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  discussionId: ID @possibleTypes(concreteTypes: ["TeamDiscussion"])
+}
+
+"""
+Autogenerated return type of CreateTeamDiscussionComment
+"""
+type CreateTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new comment.
+  """
+  teamDiscussionComment: TeamDiscussionComment
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+}
+
+"""
+Autogenerated input type of CreateTeamDiscussion
+"""
+input CreateTeamDiscussionInput {
+  """
+  The content of the discussion. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `body` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  If true, restricts the visibility of this discussion to team members and
+  organization owners. If false or not specified, allows any organization member
+  to view this discussion.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `private` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  private: Boolean
+
+  """
+  The ID of the team to which the discussion belongs. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `teamId` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The title of the discussion. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `title` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of CreateTeamDiscussion
+"""
+type CreateTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new discussion.
+  """
+  teamDiscussion: TeamDiscussion
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+}
+
+"""
+Represents the contribution a user made by committing to a repository.
+"""
+type CreatedCommitContribution implements Contribution {
+  """
+  How many commits were made on this day to this repository by the user.
+  """
+  commitCount: Int!
+
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The repository the user made a commit in.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedCommitContribution.
+"""
+type CreatedCommitContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedCommitContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedCommitContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of commits across days and repositories in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedCommitContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedCommitContribution
+}
+
+"""
+Represents the contribution a user made on GitHub by opening an issue.
+"""
+type CreatedIssueContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  The issue that was opened.
+  """
+  issue: Issue!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedIssueContribution.
+"""
+type CreatedIssueContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedIssueContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedIssueContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedIssueContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedIssueContribution
+}
+
+"""
+Represents either a issue the viewer can access or a restricted contribution.
+"""
+union CreatedIssueOrRestrictedContribution = CreatedIssueContribution | RestrictedContribution
+
+"""
+Represents the contribution a user made on GitHub by opening a pull request.
+"""
+type CreatedPullRequestContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The pull request that was opened.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedPullRequestContribution.
+"""
+type CreatedPullRequestContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedPullRequestContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedPullRequestContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedPullRequestContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedPullRequestContribution
+}
+
+"""
+Represents either a pull request the viewer can access or a restricted contribution.
+"""
+union CreatedPullRequestOrRestrictedContribution = CreatedPullRequestContribution | RestrictedContribution
+
+"""
+Represents the contribution a user made by leaving a review on a pull request.
+"""
+type CreatedPullRequestReviewContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The pull request the user reviewed.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The review the user left on the pull request.
+  """
+  pullRequestReview: PullRequestReview!
+
+  """
+  The repository containing the pull request that the user reviewed.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedPullRequestReviewContribution.
+"""
+type CreatedPullRequestReviewContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedPullRequestReviewContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedPullRequestReviewContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedPullRequestReviewContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedPullRequestReviewContribution
+}
+
+"""
+Represents the contribution a user made on GitHub by creating a repository.
+"""
+type CreatedRepositoryContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The repository that was created.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedRepositoryContribution.
+"""
+type CreatedRepositoryContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedRepositoryContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedRepositoryContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedRepositoryContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedRepositoryContribution
+}
+
+"""
+Represents either a repository the viewer can access or a restricted contribution.
+"""
+union CreatedRepositoryOrRestrictedContribution = CreatedRepositoryContribution | RestrictedContribution
+
+"""
+Represents a mention made by one issue or pull request to another.
+"""
+type CrossReferencedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the CrossReferencedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Identifies when the reference was made.
+  """
+  referencedAt: DateTime!
+
+  """
+  The HTTP path for this pull request.
+  """
+  resourcePath: URI!
+
+  """
+  Issue or pull request that made the reference.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request to which the reference was made.
+  """
+  target: ReferencedSubject!
+
+  """
+  The HTTP URL for this pull request.
+  """
+  url: URI!
+
+  """
+  Checks if the target will be closed when the source is merged.
+  """
+  willCloseTarget: Boolean!
+}
+
+"""
+An ISO-8601 encoded date string.
+"""
+scalar Date
+
+"""
+An ISO-8601 encoded UTC date string.
+"""
+scalar DateTime
+
+"""
+Autogenerated input type of DeclineTopicSuggestion
+"""
+input DeclineTopicSuggestionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the suggested topic.
+  """
+  name: String!
+
+  """
+  The reason why the suggested topic is declined.
+  """
+  reason: TopicSuggestionDeclineReason!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of DeclineTopicSuggestion
+"""
+type DeclineTopicSuggestionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The declined topic.
+  """
+  topic: Topic
+}
+
+"""
+The possible base permissions for repositories.
+"""
+enum DefaultRepositoryPermissionField {
+  """
+  Can read, write, and administrate repos by default
+  """
+  ADMIN
+
+  """
+  No access
+  """
+  NONE
+
+  """
+  Can read repos by default
+  """
+  READ
+
+  """
+  Can read and write repos by default
+  """
+  WRITE
+}
+
+"""
+Entities that can be deleted.
+"""
+interface Deletable {
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+}
+
+"""
+Autogenerated input type of DeleteBranchProtectionRule
+"""
+input DeleteBranchProtectionRuleInput {
+  """
+  The global relay id of the branch protection rule to be deleted.
+  """
+  branchProtectionRuleId: ID! @possibleTypes(concreteTypes: ["BranchProtectionRule"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of DeleteBranchProtectionRule
+"""
+type DeleteBranchProtectionRulePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteDeployment
+"""
+input DeleteDeploymentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the deployment to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Deployment"])
+}
+
+"""
+Autogenerated return type of DeleteDeployment
+"""
+type DeleteDeploymentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteDiscussionComment
+"""
+input DeleteDiscussionCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node id of the discussion comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of DeleteDiscussionComment
+"""
+type DeleteDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion comment that was just deleted.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of DeleteDiscussion
+"""
+input DeleteDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the discussion to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Discussion"])
+}
+
+"""
+Autogenerated return type of DeleteDiscussion
+"""
+type DeleteDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was just deleted.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of DeleteEnvironment
+"""
+input DeleteEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the environment to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Environment"])
+}
+
+"""
+Autogenerated return type of DeleteEnvironment
+"""
+type DeleteEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteIpAllowListEntry
+"""
+input DeleteIpAllowListEntryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IP allow list entry to delete.
+  """
+  ipAllowListEntryId: ID! @possibleTypes(concreteTypes: ["IpAllowListEntry"])
+}
+
+"""
+Autogenerated return type of DeleteIpAllowListEntry
+"""
+type DeleteIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was deleted.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
+
+"""
+Autogenerated input type of DeleteIssueComment
+"""
+input DeleteIssueCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["IssueComment"])
+}
+
+"""
+Autogenerated return type of DeleteIssueComment
+"""
+type DeleteIssueCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteIssue
+"""
+input DeleteIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to delete.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of DeleteIssue
+"""
+type DeleteIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the issue belonged to
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of DeleteLabel
+"""
+input DeleteLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the label to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Label"])
+}
+
+"""
+Autogenerated return type of DeleteLabel
+"""
+type DeleteLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteLinkedBranch
+"""
+input DeleteLinkedBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the linked branch
+  """
+  linkedBranchId: ID! @possibleTypes(concreteTypes: ["LinkedBranch"])
+}
+
+"""
+Autogenerated return type of DeleteLinkedBranch
+"""
+type DeleteLinkedBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue the linked branch was unlinked from.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of DeletePackageVersion
+"""
+input DeletePackageVersionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the package version to be deleted.
+  """
+  packageVersionId: ID! @possibleTypes(concreteTypes: ["PackageVersion"])
+}
+
+"""
+Autogenerated return type of DeletePackageVersion
+"""
+type DeletePackageVersionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not the operation succeeded.
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of DeleteProjectCard
+"""
+input DeleteProjectCardInput {
+  """
+  The id of the card to delete.
+  """
+  cardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of DeleteProjectCard
+"""
+type DeleteProjectCardPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The column the deleted card was in.
+  """
+  column: ProjectColumn
+
+  """
+  The deleted card ID.
+  """
+  deletedCardId: ID
+}
+
+"""
+Autogenerated input type of DeleteProjectColumn
+"""
+input DeleteProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to delete.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of DeleteProjectColumn
+"""
+type DeleteProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted column ID.
+  """
+  deletedColumnId: ID
+
+  """
+  The project the deleted column was in.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of DeleteProject
+"""
+input DeleteProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Project ID to update.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+}
+
+"""
+Autogenerated return type of DeleteProject
+"""
+type DeleteProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository or organization the project was removed from.
+  """
+  owner: ProjectOwner
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Field
+"""
+input DeleteProjectV2FieldInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to delete.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Field
+"""
+type DeleteProjectV2FieldPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted field.
+  """
+  projectV2Field: ProjectV2FieldConfiguration
+}
+
+"""
+Autogenerated input type of DeleteProjectV2
+"""
+input DeleteProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to delete.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Item
+"""
+input DeleteProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the item to be removed.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project from which the item should be removed.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Item
+"""
+type DeleteProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the deleted item.
+  """
+  deletedItemId: ID
+}
+
+"""
+Autogenerated return type of DeleteProjectV2
+"""
+type DeleteProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted Project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Workflow
+"""
+input DeleteProjectV2WorkflowInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the workflow to be removed.
+  """
+  workflowId: ID! @possibleTypes(concreteTypes: ["ProjectV2Workflow"])
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Workflow
+"""
+type DeleteProjectV2WorkflowPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the deleted workflow.
+  """
+  deletedWorkflowId: ID
+
+  """
+  The project the deleted workflow was in.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of DeletePullRequestReviewComment
+"""
+input DeletePullRequestReviewCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+}
+
+"""
+Autogenerated return type of DeletePullRequestReviewComment
+"""
+type DeletePullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request review the deleted comment belonged to.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  The deleted pull request review comment.
+  """
+  pullRequestReviewComment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of DeletePullRequestReview
+"""
+input DeletePullRequestReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request review to delete.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of DeletePullRequestReview
+"""
+type DeletePullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+Autogenerated input type of DeleteRef
+"""
+input DeleteRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the Ref to be deleted.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+}
+
+"""
+Autogenerated return type of DeleteRef
+"""
+type DeleteRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteRepositoryRuleset
+"""
+input DeleteRepositoryRulesetInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The global relay id of the repository ruleset to be deleted.
+  """
+  repositoryRulesetId: ID! @possibleTypes(concreteTypes: ["RepositoryRuleset"])
+}
+
+"""
+Autogenerated return type of DeleteRepositoryRuleset
+"""
+type DeleteRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteTeamDiscussionComment
+"""
+input DeleteTeamDiscussionCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussionComment"])
+}
+
+"""
+Autogenerated return type of DeleteTeamDiscussionComment
+"""
+type DeleteTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteTeamDiscussion
+"""
+input DeleteTeamDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion ID to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussion"])
+}
+
+"""
+Autogenerated return type of DeleteTeamDiscussion
+"""
+type DeleteTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteVerifiableDomain
+"""
+input DeleteVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of DeleteVerifiableDomain
+"""
+type DeleteVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owning account from which the domain was deleted.
+  """
+  owner: VerifiableDomainOwner
+}
+
+"""
+Represents a 'demilestoned' event on a given issue or pull request.
+"""
+type DemilestonedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DemilestonedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the milestone title associated with the 'demilestoned' event.
+  """
+  milestoneTitle: String!
+
+  """
+  Object referenced by event.
+  """
+  subject: MilestoneItem!
+}
+
+"""
+A Dependabot Update for a dependency in a repository
+"""
+type DependabotUpdate implements RepositoryNode {
+  """
+  The error from a dependency update
+  """
+  error: DependabotUpdateError
+
+  """
+  The associated pull request
+  """
+  pullRequest: PullRequest
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+An error produced from a Dependabot Update
+"""
+type DependabotUpdateError {
+  """
+  The body of the error
+  """
+  body: String!
+
+  """
+  The error code
+  """
+  errorType: String!
+
+  """
+  The title of the error
+  """
+  title: String!
+}
+
+"""
+A dependency manifest entry
+"""
+type DependencyGraphDependency @preview(toggledBy: "hawkgirl-preview") {
+  """
+  Does the dependency itself have dependencies?
+  """
+  hasDependencies: Boolean!
+
+  """
+  The original name of the package, as it appears in the manifest.
+  """
+  packageLabel: String!
+    @deprecated(
+      reason: "`packageLabel` will be removed. Use normalized `packageName` field instead. Removal on 2022-10-01 UTC."
+    )
+
+  """
+  The dependency package manager
+  """
+  packageManager: String
+
+  """
+  The name of the package in the canonical form used by the package manager.
+  """
+  packageName: String!
+
+  """
+  The repository containing the package
+  """
+  repository: Repository
+
+  """
+  The dependency version requirements
+  """
+  requirements: String!
+}
+
+"""
+The connection type for DependencyGraphDependency.
+"""
+type DependencyGraphDependencyConnection @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A list of edges.
+  """
+  edges: [DependencyGraphDependencyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DependencyGraphDependency]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DependencyGraphDependencyEdge @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DependencyGraphDependency
+}
+
+"""
+The possible ecosystems of a dependency graph package.
+"""
+enum DependencyGraphEcosystem {
+  """
+  GitHub Actions
+  """
+  ACTIONS
+
+  """
+  PHP packages hosted at packagist.org
+  """
+  COMPOSER
+
+  """
+  Go modules
+  """
+  GO
+
+  """
+  Java artifacts hosted at the Maven central repository
+  """
+  MAVEN
+
+  """
+  JavaScript packages hosted at npmjs.com
+  """
+  NPM
+
+  """
+  .NET packages hosted at the NuGet Gallery
+  """
+  NUGET
+
+  """
+  Python packages hosted at PyPI.org
+  """
+  PIP
+
+  """
+  Dart packages hosted at pub.dev
+  """
+  PUB
+
+  """
+  Ruby gems hosted at RubyGems.org
+  """
+  RUBYGEMS
+
+  """
+  Rust crates
+  """
+  RUST
+
+  """
+  Swift packages
+  """
+  SWIFT
+}
+
+"""
+Dependency manifest for a repository
+"""
+type DependencyGraphManifest implements Node @preview(toggledBy: "hawkgirl-preview") {
+  """
+  Path to view the manifest file blob
+  """
+  blobPath: String!
+
+  """
+  A list of manifest dependencies
+  """
+  dependencies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DependencyGraphDependencyConnection
+
+  """
+  The number of dependencies listed in the manifest
+  """
+  dependenciesCount: Int
+
+  """
+  Is the manifest too big to parse?
+  """
+  exceedsMaxSize: Boolean!
+
+  """
+  Fully qualified manifest filename
+  """
+  filename: String!
+
+  """
+  The Node ID of the DependencyGraphManifest object
+  """
+  id: ID!
+
+  """
+  Were we able to parse the manifest?
+  """
+  parseable: Boolean!
+
+  """
+  The repository containing the manifest
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for DependencyGraphManifest.
+"""
+type DependencyGraphManifestConnection @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A list of edges.
+  """
+  edges: [DependencyGraphManifestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DependencyGraphManifest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DependencyGraphManifestEdge @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DependencyGraphManifest
+}
+
+"""
+A repository deploy key.
+"""
+type DeployKey implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DeployKey object
+  """
+  id: ID!
+
+  """
+  The deploy key.
+  """
+  key: String!
+
+  """
+  Whether or not the deploy key is read only.
+  """
+  readOnly: Boolean!
+
+  """
+  The deploy key title.
+  """
+  title: String!
+
+  """
+  Whether or not the deploy key has been verified.
+  """
+  verified: Boolean!
+}
+
+"""
+The connection type for DeployKey.
+"""
+type DeployKeyConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeployKeyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeployKey]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeployKeyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeployKey
+}
+
+"""
+Represents a 'deployed' event on a given pull request.
+"""
+type DeployedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The deployment associated with the 'deployed' event.
+  """
+  deployment: Deployment!
+
+  """
+  The Node ID of the DeployedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The ref associated with the 'deployed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents triggered deployment instance.
+"""
+type Deployment implements Node {
+  """
+  Identifies the commit sha of the deployment.
+  """
+  commit: Commit
+
+  """
+  Identifies the oid of the deployment commit, even if the commit has been deleted.
+  """
+  commitOid: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who triggered the deployment.
+  """
+  creator: Actor!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The deployment description.
+  """
+  description: String
+
+  """
+  The latest environment to which this deployment was made.
+  """
+  environment: String
+
+  """
+  The Node ID of the Deployment object
+  """
+  id: ID!
+
+  """
+  The latest environment to which this deployment was made.
+  """
+  latestEnvironment: String
+
+  """
+  The latest status of this deployment.
+  """
+  latestStatus: DeploymentStatus
+
+  """
+  The original environment to which this deployment was made.
+  """
+  originalEnvironment: String
+
+  """
+  Extra information that a deployment system might need.
+  """
+  payload: String
+
+  """
+  Identifies the Ref of the deployment, if the deployment was created by ref.
+  """
+  ref: Ref
+
+  """
+  Identifies the repository associated with the deployment.
+  """
+  repository: Repository!
+
+  """
+  The current state of the deployment.
+  """
+  state: DeploymentState
+
+  """
+  A list of statuses associated with the deployment.
+  """
+  statuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentStatusConnection
+
+  """
+  The deployment task.
+  """
+  task: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for Deployment.
+"""
+type DeploymentConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Deployment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Deployment
+}
+
+"""
+Represents a 'deployment_environment_changed' event on a given pull request.
+"""
+type DeploymentEnvironmentChangedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The deployment status that updated the deployment environment.
+  """
+  deploymentStatus: DeploymentStatus!
+
+  """
+  The Node ID of the DeploymentEnvironmentChangedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Ordering options for deployment connections
+"""
+input DeploymentOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order deployments by.
+  """
+  field: DeploymentOrderField!
+}
+
+"""
+Properties by which deployment connections can be ordered.
+"""
+enum DeploymentOrderField {
+  """
+  Order collection by creation time
+  """
+  CREATED_AT
+}
+
+"""
+A protection rule.
+"""
+type DeploymentProtectionRule {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Whether deployments to this environment can be approved by the user who created the deployment.
+  """
+  preventSelfReview: Boolean
+
+  """
+  The teams or users that can review the deployment
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewerConnection!
+
+  """
+  The timeout in minutes for this protection rule.
+  """
+  timeout: Int!
+
+  """
+  The type of protection rule.
+  """
+  type: DeploymentProtectionRuleType!
+}
+
+"""
+The connection type for DeploymentProtectionRule.
+"""
+type DeploymentProtectionRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentProtectionRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentProtectionRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentProtectionRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentProtectionRule
+}
+
+"""
+The possible protection rule types.
+"""
+enum DeploymentProtectionRuleType {
+  """
+  Required reviewers
+  """
+  REQUIRED_REVIEWERS
+
+  """
+  Wait timer
+  """
+  WAIT_TIMER
+}
+
+"""
+A request to deploy a workflow run to an environment.
+"""
+type DeploymentRequest {
+  """
+  Whether or not the current user can approve the deployment
+  """
+  currentUserCanApprove: Boolean!
+
+  """
+  The target environment of the deployment
+  """
+  environment: Environment!
+
+  """
+  The teams or users that can review the deployment
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewerConnection!
+
+  """
+  The wait timer in minutes configured in the environment
+  """
+  waitTimer: Int!
+
+  """
+  The time at which the wait timer for this deployment request started
+  """
+  waitTimerStartedAt: DateTime
+}
+
+"""
+The connection type for DeploymentRequest.
+"""
+type DeploymentRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentRequest
+}
+
+"""
+A deployment review.
+"""
+type DeploymentReview implements Node {
+  """
+  The comment the user left.
+  """
+  comment: String!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The environments approved or rejected
+  """
+  environments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): EnvironmentConnection!
+
+  """
+  The Node ID of the DeploymentReview object
+  """
+  id: ID!
+
+  """
+  The decision of the user.
+  """
+  state: DeploymentReviewState!
+
+  """
+  The user that reviewed the deployment.
+  """
+  user: User!
+}
+
+"""
+The connection type for DeploymentReview.
+"""
+type DeploymentReviewConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentReviewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentReview]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentReviewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentReview
+}
+
+"""
+The possible states for a deployment review.
+"""
+enum DeploymentReviewState {
+  """
+  The deployment was approved.
+  """
+  APPROVED
+
+  """
+  The deployment was rejected.
+  """
+  REJECTED
+}
+
+"""
+Users and teams.
+"""
+union DeploymentReviewer = Team | User
+
+"""
+The connection type for DeploymentReviewer.
+"""
+type DeploymentReviewerConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentReviewerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentReviewer]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentReviewerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentReviewer
+}
+
+"""
+The possible states in which a deployment can be.
+"""
+enum DeploymentState {
+  """
+  The pending deployment was not updated after 30 minutes.
+  """
+  ABANDONED
+
+  """
+  The deployment is currently active.
+  """
+  ACTIVE
+
+  """
+  An inactive transient deployment.
+  """
+  DESTROYED
+
+  """
+  The deployment experienced an error.
+  """
+  ERROR
+
+  """
+  The deployment has failed.
+  """
+  FAILURE
+
+  """
+  The deployment is inactive.
+  """
+  INACTIVE
+
+  """
+  The deployment is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The deployment is pending.
+  """
+  PENDING
+
+  """
+  The deployment is queued
+  """
+  QUEUED
+
+  """
+  The deployment was successful.
+  """
+  SUCCESS
+
+  """
+  The deployment is waiting.
+  """
+  WAITING
+}
+
+"""
+Describes the status of a given deployment attempt.
+"""
+type DeploymentStatus implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who triggered the deployment.
+  """
+  creator: Actor!
+
+  """
+  Identifies the deployment associated with status.
+  """
+  deployment: Deployment!
+
+  """
+  Identifies the description of the deployment.
+  """
+  description: String
+
+  """
+  Identifies the environment of the deployment at the time of this deployment status
+  """
+  environment: String @preview(toggledBy: "flash-preview")
+
+  """
+  Identifies the environment URL of the deployment.
+  """
+  environmentUrl: URI
+
+  """
+  The Node ID of the DeploymentStatus object
+  """
+  id: ID!
+
+  """
+  Identifies the log URL of the deployment.
+  """
+  logUrl: URI
+
+  """
+  Identifies the current state of the deployment.
+  """
+  state: DeploymentStatusState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for DeploymentStatus.
+"""
+type DeploymentStatusConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentStatusEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentStatus]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentStatusEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentStatus
+}
+
+"""
+The possible states for a deployment status.
+"""
+enum DeploymentStatusState {
+  """
+  The deployment experienced an error.
+  """
+  ERROR
+
+  """
+  The deployment has failed.
+  """
+  FAILURE
+
+  """
+  The deployment is inactive.
+  """
+  INACTIVE
+
+  """
+  The deployment is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The deployment is pending.
+  """
+  PENDING
+
+  """
+  The deployment is queued
+  """
+  QUEUED
+
+  """
+  The deployment was successful.
+  """
+  SUCCESS
+
+  """
+  The deployment is waiting.
+  """
+  WAITING
+}
+
+"""
+Autogenerated input type of DequeuePullRequest
+"""
+input DequeuePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the pull request to be dequeued.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of DequeuePullRequest
+"""
+type DequeuePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The merge queue entry of the dequeued pull request.
+  """
+  mergeQueueEntry: MergeQueueEntry
+}
+
+"""
+The possible sides of a diff.
+"""
+enum DiffSide {
+  """
+  The left side of the diff.
+  """
+  LEFT
+
+  """
+  The right side of the diff.
+  """
+  RIGHT
+}
+
+"""
+Autogenerated input type of DisablePullRequestAutoMerge
+"""
+input DisablePullRequestAutoMergeInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to disable auto merge on.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of DisablePullRequestAutoMerge
+"""
+type DisablePullRequestAutoMergePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request auto merge was disabled on.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'disconnected' event on a given issue or pull request.
+"""
+type DisconnectedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DisconnectedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Issue or pull request from which the issue was disconnected.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request which was disconnected.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+A discussion in a repository.
+"""
+type Discussion implements Closable & Comment & Deletable & Labelable & Lockable & Node & Reactable & RepositoryNode & Subscribable & Updatable & Votable {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  The comment chosen as this discussion's answer, if any.
+  """
+  answer: DiscussionComment
+
+  """
+  The time when a user chose this discussion's answer, if answered.
+  """
+  answerChosenAt: DateTime
+
+  """
+  The user who chose this discussion's answer, if answered.
+  """
+  answerChosenBy: Actor
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The main text of the discussion post.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The category for this discussion.
+  """
+  category: DiscussionCategory!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  The replies to the discussion.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the Discussion object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Only return answered/unanswered discussions
+  """
+  isAnswered: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  `true` if the object is locked
+  """
+  locked: Boolean!
+
+  """
+  The number identifying this discussion within the repository.
+  """
+  number: Int!
+
+  """
+  The poll associated with this discussion, if one exists.
+  """
+  poll: DiscussionPoll
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The path for this discussion.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the reason for the discussion's state.
+  """
+  stateReason: DiscussionStateReason
+
+  """
+  The title of this discussion.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  The URL for this discussion.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+A category for discussions in a repository.
+"""
+type DiscussionCategory implements Node & RepositoryNode {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  A description of this category.
+  """
+  description: String
+
+  """
+  An emoji representing this category.
+  """
+  emoji: String!
+
+  """
+  This category's emoji rendered as HTML.
+  """
+  emojiHTML: HTML!
+
+  """
+  The Node ID of the DiscussionCategory object
+  """
+  id: ID!
+
+  """
+  Whether or not discussions in this category support choosing an answer with the markDiscussionCommentAsAnswer mutation.
+  """
+  isAnswerable: Boolean!
+
+  """
+  The name of this category.
+  """
+  name: String!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The slug of this category.
+  """
+  slug: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for DiscussionCategory.
+"""
+type DiscussionCategoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionCategoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionCategory]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionCategoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionCategory
+}
+
+"""
+The possible reasons for closing a discussion.
+"""
+enum DiscussionCloseReason {
+  """
+  The discussion is a duplicate of another
+  """
+  DUPLICATE
+
+  """
+  The discussion is no longer relevant
+  """
+  OUTDATED
+
+  """
+  The discussion has been resolved
+  """
+  RESOLVED
+}
+
+"""
+A comment on a discussion.
+"""
+type DiscussionComment implements Comment & Deletable & Minimizable & Node & Reactable & Updatable & UpdatableComment & Votable {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The time when this replied-to comment was deleted
+  """
+  deletedAt: DateTime
+
+  """
+  The discussion this comment was created in
+  """
+  discussion: Discussion
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the DiscussionComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Has this comment been chosen as the answer of its discussion?
+  """
+  isAnswer: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The threaded replies to this comment.
+  """
+  replies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCommentConnection!
+
+  """
+  The discussion comment this comment is a reply to
+  """
+  replyTo: DiscussionComment
+
+  """
+  The path for this discussion comment.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  The URL for this discussion comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can the current user mark this comment as an answer?
+  """
+  viewerCanMarkAsAnswer: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Can the current user unmark this comment as an answer?
+  """
+  viewerCanUnmarkAsAnswer: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+}
+
+"""
+The connection type for DiscussionComment.
+"""
+type DiscussionCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionComment
+}
+
+"""
+The connection type for Discussion.
+"""
+type DiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Discussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Discussion
+}
+
+"""
+Ways in which lists of discussions can be ordered upon return.
+"""
+input DiscussionOrder {
+  """
+  The direction in which to order discussions by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order discussions.
+  """
+  field: DiscussionOrderField!
+}
+
+"""
+Properties by which discussion connections can be ordered.
+"""
+enum DiscussionOrderField {
+  """
+  Order discussions by creation time.
+  """
+  CREATED_AT
+
+  """
+  Order discussions by most recent modification time.
+  """
+  UPDATED_AT
+}
+
+"""
+A poll for a discussion.
+"""
+type DiscussionPoll implements Node {
+  """
+  The discussion that this poll belongs to.
+  """
+  discussion: Discussion
+
+  """
+  The Node ID of the DiscussionPoll object
+  """
+  id: ID!
+
+  """
+  The options for this poll.
+  """
+  options(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the options for the discussion poll.
+    """
+    orderBy: DiscussionPollOptionOrder = {field: AUTHORED_ORDER, direction: ASC}
+  ): DiscussionPollOptionConnection
+
+  """
+  The question that is being asked by this poll.
+  """
+  question: String!
+
+  """
+  The total number of votes that have been cast for this poll.
+  """
+  totalVoteCount: Int!
+
+  """
+  Indicates if the viewer has permission to vote in this poll.
+  """
+  viewerCanVote: Boolean!
+
+  """
+  Indicates if the viewer has voted for any option in this poll.
+  """
+  viewerHasVoted: Boolean!
+}
+
+"""
+An option for a discussion poll.
+"""
+type DiscussionPollOption implements Node {
+  """
+  The Node ID of the DiscussionPollOption object
+  """
+  id: ID!
+
+  """
+  The text for this option.
+  """
+  option: String!
+
+  """
+  The discussion poll that this option belongs to.
+  """
+  poll: DiscussionPoll
+
+  """
+  The total number of votes that have been cast for this option.
+  """
+  totalVoteCount: Int!
+
+  """
+  Indicates if the viewer has voted for this option in the poll.
+  """
+  viewerHasVoted: Boolean!
+}
+
+"""
+The connection type for DiscussionPollOption.
+"""
+type DiscussionPollOptionConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionPollOptionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionPollOption]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionPollOptionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionPollOption
+}
+
+"""
+Ordering options for discussion poll option connections.
+"""
+input DiscussionPollOptionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order poll options by.
+  """
+  field: DiscussionPollOptionOrderField!
+}
+
+"""
+Properties by which discussion poll option connections can be ordered.
+"""
+enum DiscussionPollOptionOrderField {
+  """
+  Order poll options by the order that the poll author specified when creating the poll.
+  """
+  AUTHORED_ORDER
+
+  """
+  Order poll options by the number of votes it has.
+  """
+  VOTE_COUNT
+}
+
+"""
+The possible states of a discussion.
+"""
+enum DiscussionState {
+  """
+  A discussion that has been closed
+  """
+  CLOSED
+
+  """
+  A discussion that is open
+  """
+  OPEN
+}
+
+"""
+The possible state reasons of a discussion.
+"""
+enum DiscussionStateReason {
+  """
+  The discussion is a duplicate of another
+  """
+  DUPLICATE
+
+  """
+  The discussion is no longer relevant
+  """
+  OUTDATED
+
+  """
+  The discussion was reopened
+  """
+  REOPENED
+
+  """
+  The discussion has been resolved
+  """
+  RESOLVED
+}
+
+"""
+Autogenerated input type of DismissPullRequestReview
+"""
+input DismissPullRequestReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The contents of the pull request review dismissal message.
+  """
+  message: String!
+
+  """
+  The Node ID of the pull request review to modify.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of DismissPullRequestReview
+"""
+type DismissPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The dismissed pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+The possible reasons that a Dependabot alert was dismissed.
+"""
+enum DismissReason {
+  """
+  A fix has already been started
+  """
+  FIX_STARTED
+
+  """
+  This alert is inaccurate or incorrect
+  """
+  INACCURATE
+
+  """
+  Vulnerable code is not actually used
+  """
+  NOT_USED
+
+  """
+  No bandwidth to fix this
+  """
+  NO_BANDWIDTH
+
+  """
+  Risk is tolerable to this project
+  """
+  TOLERABLE_RISK
+}
+
+"""
+Autogenerated input type of DismissRepositoryVulnerabilityAlert
+"""
+input DismissRepositoryVulnerabilityAlertInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reason the Dependabot alert is being dismissed.
+  """
+  dismissReason: DismissReason!
+
+  """
+  The Dependabot alert ID to dismiss.
+  """
+  repositoryVulnerabilityAlertId: ID! @possibleTypes(concreteTypes: ["RepositoryVulnerabilityAlert"])
+}
+
+"""
+Autogenerated return type of DismissRepositoryVulnerabilityAlert
+"""
+type DismissRepositoryVulnerabilityAlertPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Dependabot alert that was dismissed
+  """
+  repositoryVulnerabilityAlert: RepositoryVulnerabilityAlert
+}
+
+"""
+A draft issue within a project.
+"""
+type DraftIssue implements Node {
+  """
+  A list of users to assigned to this draft issue.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The body of the draft issue.
+  """
+  body: String!
+
+  """
+  The body of the draft issue rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body of the draft issue rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this draft issue.
+  """
+  creator: Actor
+
+  """
+  The Node ID of the DraftIssue object
+  """
+  id: ID!
+
+  """
+  List of items linked with the draft issue (currently draft issue can be linked to only one item).
+  """
+  projectV2Items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Projects that link to this draft issue (currently draft issue can be linked to only one project).
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  The title of the draft issue
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Specifies a review comment to be left with a Pull Request Review.
+"""
+input DraftPullRequestReviewComment {
+  """
+  Body of the comment to leave.
+  """
+  body: String!
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  Position in the file to leave a comment on.
+  """
+  position: Int!
+}
+
+"""
+Specifies a review comment thread to be left with a Pull Request Review.
+"""
+input DraftPullRequestReviewThread {
+  """
+  Body of the comment to leave.
+  """
+  body: String!
+
+  """
+  The line of the blob to which the thread refers. The end of the line range for multi-line comments.
+  """
+  line: Int!
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  The side of the diff on which the line resides. For multi-line comments, this is the side for the end of the line range.
+  """
+  side: DiffSide = RIGHT
+
+  """
+  The first line of the range to which the comment refers.
+  """
+  startLine: Int
+
+  """
+  The side of the diff on which the start line resides.
+  """
+  startSide: DiffSide = RIGHT
+}
+
+"""
+Autogenerated input type of EnablePullRequestAutoMerge
+"""
+input EnablePullRequestAutoMergeInput {
+  """
+  The email address to associate with this merge.
+  """
+  authorEmail: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Commit body to use for the commit when the PR is mergeable; if omitted, a
+  default message will be used. NOTE: when merging with a merge queue any input
+  value for commit message is ignored.
+  """
+  commitBody: String
+
+  """
+  Commit headline to use for the commit when the PR is mergeable; if omitted, a
+  default message will be used. NOTE: when merging with a merge queue any input
+  value for commit headline is ignored.
+  """
+  commitHeadline: String
+
+  """
+  The expected head OID of the pull request.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The merge method to use. If omitted, defaults to `MERGE`. NOTE: when merging
+  with a merge queue any input value for merge method is ignored.
+  """
+  mergeMethod: PullRequestMergeMethod = MERGE
+
+  """
+  ID of the pull request to enable auto-merge on.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of EnablePullRequestAutoMerge
+"""
+type EnablePullRequestAutoMergePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request auto-merge was enabled on.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of EnqueuePullRequest
+"""
+input EnqueuePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The expected head OID of the pull request.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  Add the pull request to the front of the queue.
+  """
+  jump: Boolean
+
+  """
+  The ID of the pull request to enqueue.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of EnqueuePullRequest
+"""
+type EnqueuePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The merge queue entry for the enqueued pull request.
+  """
+  mergeQueueEntry: MergeQueueEntry
+}
+
+"""
+An account to manage multiple organizations with consolidated policy and billing.
+"""
+type Enterprise implements AnnouncementBanner & Node {
+  """
+  The text of the announcement
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user
+  """
+  announcementUserDismissible: Boolean
+
+  """
+  A URL pointing to the enterprise's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Enterprise billing information visible to enterprise billing managers.
+  """
+  billingInfo: EnterpriseBillingInfo
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the enterprise.
+  """
+  description: String
+
+  """
+  The description of the enterprise as HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  The Node ID of the Enterprise object
+  """
+  id: ID!
+
+  """
+  The location of the enterprise.
+  """
+  location: String
+
+  """
+  A list of users who are members of this enterprise.
+  """
+  members(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Only return members within the selected GitHub Enterprise deployment
+    """
+    deployment: EnterpriseUserDeployment
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return members with this two-factor authentication status. Does not
+    include members who only have an account on a GitHub Enterprise Server instance.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for members returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return members within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the enterprise organization or server.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseMemberConnection!
+
+  """
+  The name of the enterprise.
+  """
+  name: String!
+
+  """
+  A list of organizations that belong to this enterprise.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations returned from the connection.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The viewer's role in an organization.
+    """
+    viewerOrganizationRole: RoleInOrganization
+  ): OrganizationConnection!
+
+  """
+  Enterprise information visible to enterprise owners or enterprise owners'
+  personal access tokens (classic) with read:enterprise or admin:enterprise scope.
+  """
+  ownerInfo: EnterpriseOwnerInfo
+
+  """
+  The HTTP path for this enterprise.
+  """
+  resourcePath: URI!
+
+  """
+  The URL-friendly identifier for the enterprise.
+  """
+  slug: String!
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  url: URI!
+
+  """
+  Is the current viewer an admin of this enterprise?
+  """
+  viewerIsAdmin: Boolean!
+
+  """
+  The URL of the enterprise website.
+  """
+  websiteUrl: URI
+}
+
+"""
+The connection type for User.
+"""
+type EnterpriseAdministratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseAdministratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is an administrator of an enterprise.
+"""
+type EnterpriseAdministratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role of the administrator.
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+An invitation for a user to become an owner or billing manager of an enterprise.
+"""
+type EnterpriseAdministratorInvitation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email of the person who was invited to the enterprise.
+  """
+  email: String
+
+  """
+  The enterprise the invitation is for.
+  """
+  enterprise: Enterprise!
+
+  """
+  The Node ID of the EnterpriseAdministratorInvitation object
+  """
+  id: ID!
+
+  """
+  The user who was invited to the enterprise.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User
+
+  """
+  The invitee's pending role in the enterprise (owner or billing_manager).
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+The connection type for EnterpriseAdministratorInvitation.
+"""
+type EnterpriseAdministratorInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseAdministratorInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseAdministratorInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseAdministratorInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseAdministratorInvitation
+}
+
+"""
+Ordering options for enterprise administrator invitation connections
+"""
+input EnterpriseAdministratorInvitationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise administrator invitations by.
+  """
+  field: EnterpriseAdministratorInvitationOrderField!
+}
+
+"""
+Properties by which enterprise administrator invitation connections can be ordered.
+"""
+enum EnterpriseAdministratorInvitationOrderField {
+  """
+  Order enterprise administrator member invitations by creation time
+  """
+  CREATED_AT
+}
+
+"""
+The possible administrator roles in an enterprise account.
+"""
+enum EnterpriseAdministratorRole {
+  """
+  Represents a billing manager of the enterprise account.
+  """
+  BILLING_MANAGER
+
+  """
+  Represents an owner of the enterprise account.
+  """
+  OWNER
+}
+
+"""
+The possible values for the enterprise allow private repository forking policy value.
+"""
+enum EnterpriseAllowPrivateRepositoryForkingPolicyValue {
+  """
+  Members can fork a repository to an organization within this enterprise.
+  """
+  ENTERPRISE_ORGANIZATIONS
+
+  """
+  Members can fork a repository to their enterprise-managed user account or an organization inside this enterprise.
+  """
+  ENTERPRISE_ORGANIZATIONS_USER_ACCOUNTS
+
+  """
+  Members can fork a repository to their user account or an organization, either inside or outside of this enterprise.
+  """
+  EVERYWHERE
+
+  """
+  Members can fork a repository only within the same organization (intra-org).
+  """
+  SAME_ORGANIZATION
+
+  """
+  Members can fork a repository to their user account or within the same organization.
+  """
+  SAME_ORGANIZATION_USER_ACCOUNTS
+
+  """
+  Members can fork a repository to their user account.
+  """
+  USER_ACCOUNTS
+}
+
+"""
+Metadata for an audit entry containing enterprise account information.
+"""
+interface EnterpriseAuditEntryData {
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+}
+
+"""
+Enterprise billing information visible to enterprise billing managers and owners.
+"""
+type EnterpriseBillingInfo {
+  """
+  The number of licenseable users/emails across the enterprise.
+  """
+  allLicensableUsersCount: Int!
+
+  """
+  The number of data packs used by all organizations owned by the enterprise.
+  """
+  assetPacks: Int!
+
+  """
+  The bandwidth quota in GB for all organizations owned by the enterprise.
+  """
+  bandwidthQuota: Float!
+
+  """
+  The bandwidth usage in GB for all organizations owned by the enterprise.
+  """
+  bandwidthUsage: Float!
+
+  """
+  The bandwidth usage as a percentage of the bandwidth quota.
+  """
+  bandwidthUsagePercentage: Int!
+
+  """
+  The storage quota in GB for all organizations owned by the enterprise.
+  """
+  storageQuota: Float!
+
+  """
+  The storage usage in GB for all organizations owned by the enterprise.
+  """
+  storageUsage: Float!
+
+  """
+  The storage usage as a percentage of the storage quota.
+  """
+  storageUsagePercentage: Int!
+
+  """
+  The number of available licenses across all owned organizations based on the unique number of billable users.
+  """
+  totalAvailableLicenses: Int!
+
+  """
+  The total number of licenses allocated.
+  """
+  totalLicenses: Int!
+}
+
+"""
+The connection type for Enterprise.
+"""
+type EnterpriseConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Enterprise]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible values for the enterprise base repository permission setting.
+"""
+enum EnterpriseDefaultRepositoryPermissionSettingValue {
+  """
+  Organization members will be able to clone, pull, push, and add new collaborators to all organization repositories.
+  """
+  ADMIN
+
+  """
+  Organization members will only be able to clone and pull public repositories.
+  """
+  NONE
+
+  """
+  Organizations in the enterprise choose base repository permissions for their members.
+  """
+  NO_POLICY
+
+  """
+  Organization members will be able to clone and pull all organization repositories.
+  """
+  READ
+
+  """
+  Organization members will be able to clone, pull, and push all organization repositories.
+  """
+  WRITE
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Enterprise
+}
+
+"""
+The possible values for an enabled/disabled enterprise setting.
+"""
+enum EnterpriseEnabledDisabledSettingValue {
+  """
+  The setting is disabled for organizations in the enterprise.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+
+  """
+  There is no policy set for organizations in the enterprise.
+  """
+  NO_POLICY
+}
+
+"""
+The possible values for an enabled/no policy enterprise setting.
+"""
+enum EnterpriseEnabledSettingValue {
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+
+  """
+  There is no policy set for organizations in the enterprise.
+  """
+  NO_POLICY
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type EnterpriseFailedInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseFailedInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the total count of unique users in the connection.
+  """
+  totalUniqueUserCount: Int!
+}
+
+"""
+A failed invitation to be a member in an enterprise organization.
+"""
+type EnterpriseFailedInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+An identity provider configured to provision identities for an enterprise.
+Visible to enterprise owners or enterprise owners' personal access tokens
+(classic) with read:enterprise or admin:enterprise scope.
+"""
+type EnterpriseIdentityProvider implements Node {
+  """
+  The digest algorithm used to sign SAML requests for the identity provider.
+  """
+  digestMethod: SamlDigestAlgorithm
+
+  """
+  The enterprise this identity provider belongs to.
+  """
+  enterprise: Enterprise
+
+  """
+  ExternalIdentities provisioned by this identity provider.
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the users login
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the users userName/NameID attribute
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the EnterpriseIdentityProvider object
+  """
+  id: ID!
+
+  """
+  The x509 certificate used by the identity provider to sign assertions and responses.
+  """
+  idpCertificate: X509Certificate
+
+  """
+  The Issuer Entity ID for the SAML identity provider.
+  """
+  issuer: String
+
+  """
+  Recovery codes that can be used by admins to access the enterprise if the identity provider is unavailable.
+  """
+  recoveryCodes: [String!]
+
+  """
+  The signature algorithm used to sign SAML requests for the identity provider.
+  """
+  signatureMethod: SamlSignatureAlgorithm
+
+  """
+  The URL endpoint for the identity provider's SAML SSO.
+  """
+  ssoUrl: URI
+}
+
+"""
+An object that is a member of an enterprise.
+"""
+union EnterpriseMember = EnterpriseUserAccount | User
+
+"""
+The connection type for EnterpriseMember.
+"""
+type EnterpriseMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseMember]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is a member of an enterprise through one or more organizations.
+"""
+type EnterpriseMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseMember
+}
+
+"""
+Ordering options for enterprise member connections.
+"""
+input EnterpriseMemberOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise members by.
+  """
+  field: EnterpriseMemberOrderField!
+}
+
+"""
+Properties by which enterprise member connections can be ordered.
+"""
+enum EnterpriseMemberOrderField {
+  """
+  Order enterprise members by creation time
+  """
+  CREATED_AT
+
+  """
+  Order enterprise members by login
+  """
+  LOGIN
+}
+
+"""
+The possible values for the enterprise members can create repositories setting.
+"""
+enum EnterpriseMembersCanCreateRepositoriesSettingValue {
+  """
+  Members will be able to create public and private repositories.
+  """
+  ALL
+
+  """
+  Members will not be able to create public or private repositories.
+  """
+  DISABLED
+
+  """
+  Organization owners choose whether to allow members to create repositories.
+  """
+  NO_POLICY
+
+  """
+  Members will be able to create only private repositories.
+  """
+  PRIVATE
+
+  """
+  Members will be able to create only public repositories.
+  """
+  PUBLIC
+}
+
+"""
+The possible values for the members can make purchases setting.
+"""
+enum EnterpriseMembersCanMakePurchasesSettingValue {
+  """
+  The setting is disabled for organizations in the enterprise.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+}
+
+"""
+The possible values we have for filtering Platform::Objects::User#enterprises.
+"""
+enum EnterpriseMembershipType {
+  """
+  Returns all enterprises in which the user is an admin.
+  """
+  ADMIN
+
+  """
+  Returns all enterprises in which the user is a member, admin, or billing manager.
+  """
+  ALL
+
+  """
+  Returns all enterprises in which the user is a billing manager.
+  """
+  BILLING_MANAGER
+
+  """
+  Returns all enterprises in which the user is a member of an org that is owned by the enterprise.
+  """
+  ORG_MEMBERSHIP
+}
+
+"""
+Ordering options for enterprises.
+"""
+input EnterpriseOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprises by.
+  """
+  field: EnterpriseOrderField!
+}
+
+"""
+Properties by which enterprise connections can be ordered.
+"""
+enum EnterpriseOrderField {
+  """
+  Order enterprises by name
+  """
+  NAME
+}
+
+"""
+The connection type for Organization.
+"""
+type EnterpriseOrganizationMembershipConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseOrganizationMembershipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Organization]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An enterprise organization that a user is a member of.
+"""
+type EnterpriseOrganizationMembershipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Organization
+
+  """
+  The role of the user in the enterprise membership.
+  """
+  role: EnterpriseUserAccountMembershipRole!
+}
+
+"""
+The connection type for User.
+"""
+type EnterpriseOutsideCollaboratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseOutsideCollaboratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is an outside collaborator of an enterprise through one or more organizations.
+"""
+type EnterpriseOutsideCollaboratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The enterprise organization repositories this user is a member of.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories.
+    """
+    orderBy: RepositoryOrder = {field: NAME, direction: ASC}
+  ): EnterpriseRepositoryInfoConnection!
+}
+
+"""
+Enterprise information visible to enterprise owners or enterprise owners'
+personal access tokens (classic) with read:enterprise or admin:enterprise scope.
+"""
+type EnterpriseOwnerInfo {
+  """
+  A list of all of the administrators for this enterprise.
+  """
+  admins(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return administrators with this two-factor authentication status.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for administrators returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return members within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role to filter by.
+    """
+    role: EnterpriseAdministratorRole
+  ): EnterpriseAdministratorConnection!
+
+  """
+  A list of users in the enterprise who currently have two-factor authentication disabled.
+  """
+  affiliatedUsersWithTwoFactorDisabled(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  Whether or not affiliated users with two-factor authentication disabled exist in the enterprise.
+  """
+  affiliatedUsersWithTwoFactorDisabledExist: Boolean!
+
+  """
+  The setting value for whether private repository forking is enabled for repositories in organizations in this enterprise.
+  """
+  allowPrivateRepositoryForkingSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided private repository forking setting value.
+  """
+  allowPrivateRepositoryForkingSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The value for the allow private repository forking policy on the enterprise.
+  """
+  allowPrivateRepositoryForkingSettingPolicyValue: EnterpriseAllowPrivateRepositoryForkingPolicyValue
+
+  """
+  The setting value for base repository permissions for organizations in this enterprise.
+  """
+  defaultRepositoryPermissionSetting: EnterpriseDefaultRepositoryPermissionSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided base repository permission.
+  """
+  defaultRepositoryPermissionSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The permission to find organizations for.
+    """
+    value: DefaultRepositoryPermissionField!
+  ): OrganizationConnection!
+
+  """
+  A list of domains owned by the enterprise. Visible to enterprise owners or
+  enterprise owners' personal access tokens (classic) with admin:enterprise scope.
+  """
+  domains(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter whether or not the domain is approved.
+    """
+    isApproved: Boolean = null
+
+    """
+    Filter whether or not the domain is verified.
+    """
+    isVerified: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for verifiable domains returned.
+    """
+    orderBy: VerifiableDomainOrder = {field: DOMAIN, direction: ASC}
+  ): VerifiableDomainConnection!
+
+  """
+  Enterprise Server installations owned by the enterprise.
+  """
+  enterpriseServerInstallations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Whether or not to only return installations discovered via GitHub Connect.
+    """
+    connectedOnly: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server installations returned.
+    """
+    orderBy: EnterpriseServerInstallationOrder = {field: HOST_NAME, direction: ASC}
+  ): EnterpriseServerInstallationConnection!
+
+  """
+  A list of failed invitations in the enterprise.
+  """
+  failedInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): EnterpriseFailedInvitationConnection!
+
+  """
+  The setting value for whether the enterprise has an IP allow list enabled.
+  """
+  ipAllowListEnabledSetting: IpAllowListEnabledSettingValue!
+
+  """
+  The IP addresses that are allowed to access resources owned by the enterprise.
+  Visible to enterprise owners or enterprise owners' personal access tokens
+  (classic) with admin:enterprise scope.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The setting value for whether the enterprise has IP allow list configuration for installed GitHub Apps enabled.
+  """
+  ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue!
+
+  """
+  Whether or not the base repository permission is currently being updated.
+  """
+  isUpdatingDefaultRepositoryPermission: Boolean!
+
+  """
+  Whether the two-factor authentication requirement is currently being enforced.
+  """
+  isUpdatingTwoFactorRequirement: Boolean!
+
+  """
+  The setting value for whether organization members with admin permissions on a
+  repository can change repository visibility.
+  """
+  membersCanChangeRepositoryVisibilitySetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided can change repository visibility setting value.
+  """
+  membersCanChangeRepositoryVisibilitySettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members of organizations in the enterprise can create internal repositories.
+  """
+  membersCanCreateInternalRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create private repositories.
+  """
+  membersCanCreatePrivateRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create public repositories.
+  """
+  membersCanCreatePublicRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create repositories.
+  """
+  membersCanCreateRepositoriesSetting: EnterpriseMembersCanCreateRepositoriesSettingValue
+
+  """
+  A list of enterprise organizations configured with the provided repository creation setting value.
+  """
+  membersCanCreateRepositoriesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting to find organizations for.
+    """
+    value: OrganizationMembersCanCreateRepositoriesSettingValue!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members with admin permissions for repositories can delete issues.
+  """
+  membersCanDeleteIssuesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can delete issues setting value.
+  """
+  membersCanDeleteIssuesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members with admin permissions for repositories can delete or transfer repositories.
+  """
+  membersCanDeleteRepositoriesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can delete repositories setting value.
+  """
+  membersCanDeleteRepositoriesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members of organizations in the enterprise can invite outside collaborators.
+  """
+  membersCanInviteCollaboratorsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can invite collaborators setting value.
+  """
+  membersCanInviteCollaboratorsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  Indicates whether members of this enterprise's organizations can purchase additional services for those organizations.
+  """
+  membersCanMakePurchasesSetting: EnterpriseMembersCanMakePurchasesSettingValue!
+
+  """
+  The setting value for whether members with admin permissions for repositories can update protected branches.
+  """
+  membersCanUpdateProtectedBranchesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can update protected branches setting value.
+  """
+  membersCanUpdateProtectedBranchesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members can view dependency insights.
+  """
+  membersCanViewDependencyInsightsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can view dependency insights setting value.
+  """
+  membersCanViewDependencyInsightsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  Indicates if email notification delivery for this enterprise is restricted to verified or approved domains.
+  """
+  notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue!
+
+  """
+  The OIDC Identity Provider for the enterprise.
+  """
+  oidcProvider: OIDCProvider
+
+  """
+  The setting value for whether organization projects are enabled for organizations in this enterprise.
+  """
+  organizationProjectsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided organization projects setting value.
+  """
+  organizationProjectsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  A list of outside collaborators across the repositories in the enterprise.
+  """
+  outsideCollaborators(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return outside collaborators with this two-factor authentication status.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The login of one specific outside collaborator.
+    """
+    login: String
+
+    """
+    Ordering options for outside collaborators returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return outside collaborators within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    Only return outside collaborators on repositories with this visibility.
+    """
+    visibility: RepositoryVisibility
+  ): EnterpriseOutsideCollaboratorConnection!
+
+  """
+  A list of pending administrator invitations for the enterprise.
+  """
+  pendingAdminInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pending enterprise administrator invitations returned from the connection.
+    """
+    orderBy: EnterpriseAdministratorInvitationOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role to filter by.
+    """
+    role: EnterpriseAdministratorRole
+  ): EnterpriseAdministratorInvitationConnection!
+
+  """
+  A list of pending collaborator invitations across the repositories in the enterprise.
+  """
+  pendingCollaboratorInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pending repository collaborator invitations returned from the connection.
+    """
+    orderBy: RepositoryInvitationOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): RepositoryInvitationConnection!
+
+  """
+  A list of pending member invitations for organizations in the enterprise.
+  """
+  pendingMemberInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return invitations matching this invitation source
+    """
+    invitationSource: OrganizationInvitationSource
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Only return invitations within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): EnterprisePendingMemberInvitationConnection!
+
+  """
+  The setting value for whether repository projects are enabled in this enterprise.
+  """
+  repositoryProjectsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided repository projects setting value.
+  """
+  repositoryProjectsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The SAML Identity Provider for the enterprise.
+  """
+  samlIdentityProvider: EnterpriseIdentityProvider
+
+  """
+  A list of enterprise organizations configured with the SAML single sign-on setting value.
+  """
+  samlIdentityProviderSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: IdentityProviderConfigurationState!
+  ): OrganizationConnection!
+
+  """
+  A list of members with a support entitlement.
+  """
+  supportEntitlements(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for support entitlement users returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+  ): EnterpriseMemberConnection!
+
+  """
+  The setting value for whether team discussions are enabled for organizations in this enterprise.
+  """
+  teamDiscussionsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided team discussions setting value.
+  """
+  teamDiscussionsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether the enterprise requires two-factor authentication for its organizations and users.
+  """
+  twoFactorRequiredSetting: EnterpriseEnabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the two-factor authentication setting value.
+  """
+  twoFactorRequiredSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type EnterprisePendingMemberInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterprisePendingMemberInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the total count of unique users in the connection.
+  """
+  totalUniqueUserCount: Int!
+}
+
+"""
+An invitation to be a member in an enterprise organization.
+"""
+type EnterprisePendingMemberInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+A subset of repository information queryable from an enterprise.
+"""
+type EnterpriseRepositoryInfo implements Node {
+  """
+  The Node ID of the EnterpriseRepositoryInfo object
+  """
+  id: ID!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  The repository's name.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+}
+
+"""
+The connection type for EnterpriseRepositoryInfo.
+"""
+type EnterpriseRepositoryInfoConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseRepositoryInfoEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseRepositoryInfo]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseRepositoryInfoEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseRepositoryInfo
+}
+
+"""
+An Enterprise Server installation.
+"""
+type EnterpriseServerInstallation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The customer name to which the Enterprise Server installation belongs.
+  """
+  customerName: String!
+
+  """
+  The host name of the Enterprise Server installation.
+  """
+  hostName: String!
+
+  """
+  The Node ID of the EnterpriseServerInstallation object
+  """
+  id: ID!
+
+  """
+  Whether or not the installation is connected to an Enterprise Server installation via GitHub Connect.
+  """
+  isConnected: Boolean!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  User accounts on this Enterprise Server installation.
+  """
+  userAccounts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user accounts returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountOrder = {field: LOGIN, direction: ASC}
+  ): EnterpriseServerUserAccountConnection!
+
+  """
+  User accounts uploads for the Enterprise Server installation.
+  """
+  userAccountsUploads(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user accounts uploads returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountsUploadOrder = {field: CREATED_AT, direction: DESC}
+  ): EnterpriseServerUserAccountsUploadConnection!
+}
+
+"""
+The connection type for EnterpriseServerInstallation.
+"""
+type EnterpriseServerInstallationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerInstallationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerInstallation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerInstallationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerInstallation
+}
+
+"""
+The connection type for EnterpriseServerInstallation.
+"""
+type EnterpriseServerInstallationMembershipConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerInstallationMembershipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerInstallation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An Enterprise Server installation that a user is a member of.
+"""
+type EnterpriseServerInstallationMembershipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerInstallation
+
+  """
+  The role of the user in the enterprise membership.
+  """
+  role: EnterpriseUserAccountMembershipRole!
+}
+
+"""
+Ordering options for Enterprise Server installation connections.
+"""
+input EnterpriseServerInstallationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order Enterprise Server installations by.
+  """
+  field: EnterpriseServerInstallationOrderField!
+}
+
+"""
+Properties by which Enterprise Server installation connections can be ordered.
+"""
+enum EnterpriseServerInstallationOrderField {
+  """
+  Order Enterprise Server installations by creation time
+  """
+  CREATED_AT
+
+  """
+  Order Enterprise Server installations by customer name
+  """
+  CUSTOMER_NAME
+
+  """
+  Order Enterprise Server installations by host name
+  """
+  HOST_NAME
+}
+
+"""
+A user account on an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccount implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  User emails belonging to this user account.
+  """
+  emails(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user account emails returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountEmailOrder = {field: EMAIL, direction: ASC}
+  ): EnterpriseServerUserAccountEmailConnection!
+
+  """
+  The Enterprise Server installation on which this user account exists.
+  """
+  enterpriseServerInstallation: EnterpriseServerInstallation!
+
+  """
+  The Node ID of the EnterpriseServerUserAccount object
+  """
+  id: ID!
+
+  """
+  Whether the user account is a site administrator on the Enterprise Server installation.
+  """
+  isSiteAdmin: Boolean!
+
+  """
+  The login of the user account on the Enterprise Server installation.
+  """
+  login: String!
+
+  """
+  The profile name of the user account on the Enterprise Server installation.
+  """
+  profileName: String
+
+  """
+  The date and time when the user account was created on the Enterprise Server installation.
+  """
+  remoteCreatedAt: DateTime!
+
+  """
+  The ID of the user account on the Enterprise Server installation.
+  """
+  remoteUserId: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for EnterpriseServerUserAccount.
+"""
+type EnterpriseServerUserAccountConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccount]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccount
+}
+
+"""
+An email belonging to a user account on an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccountEmail implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email address.
+  """
+  email: String!
+
+  """
+  The Node ID of the EnterpriseServerUserAccountEmail object
+  """
+  id: ID!
+
+  """
+  Indicates whether this is the primary email of the associated user account.
+  """
+  isPrimary: Boolean!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user account to which the email belongs.
+  """
+  userAccount: EnterpriseServerUserAccount!
+}
+
+"""
+The connection type for EnterpriseServerUserAccountEmail.
+"""
+type EnterpriseServerUserAccountEmailConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountEmailEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccountEmail]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountEmailEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccountEmail
+}
+
+"""
+Ordering options for Enterprise Server user account email connections.
+"""
+input EnterpriseServerUserAccountEmailOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order emails by.
+  """
+  field: EnterpriseServerUserAccountEmailOrderField!
+}
+
+"""
+Properties by which Enterprise Server user account email connections can be ordered.
+"""
+enum EnterpriseServerUserAccountEmailOrderField {
+  """
+  Order emails by email
+  """
+  EMAIL
+}
+
+"""
+Ordering options for Enterprise Server user account connections.
+"""
+input EnterpriseServerUserAccountOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user accounts by.
+  """
+  field: EnterpriseServerUserAccountOrderField!
+}
+
+"""
+Properties by which Enterprise Server user account connections can be ordered.
+"""
+enum EnterpriseServerUserAccountOrderField {
+  """
+  Order user accounts by login
+  """
+  LOGIN
+
+  """
+  Order user accounts by creation time on the Enterprise Server installation
+  """
+  REMOTE_CREATED_AT
+}
+
+"""
+A user accounts upload from an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccountsUpload implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The enterprise to which this upload belongs.
+  """
+  enterprise: Enterprise!
+
+  """
+  The Enterprise Server installation for which this upload was generated.
+  """
+  enterpriseServerInstallation: EnterpriseServerInstallation!
+
+  """
+  The Node ID of the EnterpriseServerUserAccountsUpload object
+  """
+  id: ID!
+
+  """
+  The name of the file uploaded.
+  """
+  name: String!
+
+  """
+  The synchronization state of the upload
+  """
+  syncState: EnterpriseServerUserAccountsUploadSyncState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for EnterpriseServerUserAccountsUpload.
+"""
+type EnterpriseServerUserAccountsUploadConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountsUploadEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccountsUpload]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountsUploadEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccountsUpload
+}
+
+"""
+Ordering options for Enterprise Server user accounts upload connections.
+"""
+input EnterpriseServerUserAccountsUploadOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user accounts uploads by.
+  """
+  field: EnterpriseServerUserAccountsUploadOrderField!
+}
+
+"""
+Properties by which Enterprise Server user accounts upload connections can be ordered.
+"""
+enum EnterpriseServerUserAccountsUploadOrderField {
+  """
+  Order user accounts uploads by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Synchronization state of the Enterprise Server user accounts upload
+"""
+enum EnterpriseServerUserAccountsUploadSyncState {
+  """
+  The synchronization of the upload failed.
+  """
+  FAILURE
+
+  """
+  The synchronization of the upload is pending.
+  """
+  PENDING
+
+  """
+  The synchronization of the upload succeeded.
+  """
+  SUCCESS
+}
+
+"""
+An account for a user who is an admin of an enterprise or a member of an enterprise through one or more organizations.
+"""
+type EnterpriseUserAccount implements Actor & Node {
+  """
+  A URL pointing to the enterprise user account's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The enterprise in which this user account exists.
+  """
+  enterprise: Enterprise!
+
+  """
+  A list of Enterprise Server installations this user is a member of.
+  """
+  enterpriseInstallations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for installations returned from the connection.
+    """
+    orderBy: EnterpriseServerInstallationOrder = {field: HOST_NAME, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the installation.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseServerInstallationMembershipConnection!
+
+  """
+  The Node ID of the EnterpriseUserAccount object
+  """
+  id: ID!
+
+  """
+  An identifier for the enterprise user account, a login or email address
+  """
+  login: String!
+
+  """
+  The name of the enterprise user account
+  """
+  name: String
+
+  """
+  A list of enterprise organizations this user is a member of.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations returned from the connection.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the enterprise organization.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseOrganizationMembershipConnection!
+
+  """
+  The HTTP path for this user.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this user.
+  """
+  url: URI!
+
+  """
+  The user within the enterprise.
+  """
+  user: User
+}
+
+"""
+The possible roles for enterprise membership.
+"""
+enum EnterpriseUserAccountMembershipRole {
+  """
+  The user is a member of an organization in the enterprise.
+  """
+  MEMBER
+
+  """
+  The user is an owner of an organization in the enterprise.
+  """
+  OWNER
+
+  """
+  The user is not an owner of the enterprise, and not a member or owner of any
+  organizations in the enterprise; only for EMU-enabled enterprises.
+  """
+  UNAFFILIATED
+}
+
+"""
+The possible GitHub Enterprise deployments where this user can exist.
+"""
+enum EnterpriseUserDeployment {
+  """
+  The user is part of a GitHub Enterprise Cloud deployment.
+  """
+  CLOUD
+
+  """
+  The user is part of a GitHub Enterprise Server deployment.
+  """
+  SERVER
+}
+
+"""
+An environment.
+"""
+type Environment implements Node {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Environment object
+  """
+  id: ID!
+
+  """
+  The name of the environment
+  """
+  name: String!
+
+  """
+  The protection rules defined for this environment
+  """
+  protectionRules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentProtectionRuleConnection!
+}
+
+"""
+The connection type for Environment.
+"""
+type EnvironmentConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnvironmentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Environment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnvironmentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Environment
+}
+
+"""
+Properties by which environments connections can be ordered
+"""
+enum EnvironmentOrderField {
+  """
+  Order environments by name.
+  """
+  NAME
+}
+
+"""
+Ordering options for environments
+"""
+input Environments {
+  """
+  The direction in which to order environments by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order environments by.
+  """
+  field: EnvironmentOrderField!
+}
+
+"""
+An external identity provisioned by SAML SSO or SCIM. If SAML is configured on
+the organization, the external identity is visible to (1) organization owners,
+(2) organization owners' personal access tokens (classic) with read:org or
+admin:org scope, (3) GitHub App with an installation token with read or write
+access to members. If SAML is configured on the enterprise, the external
+identity is visible to (1) enterprise owners, (2) enterprise owners' personal
+access tokens (classic) with read:enterprise or admin:enterprise scope.
+"""
+type ExternalIdentity implements Node {
+  """
+  The GUID for this identity
+  """
+  guid: String!
+
+  """
+  The Node ID of the ExternalIdentity object
+  """
+  id: ID!
+
+  """
+  Organization invitation for this SCIM-provisioned external identity
+  """
+  organizationInvitation: OrganizationInvitation
+
+  """
+  SAML Identity attributes
+  """
+  samlIdentity: ExternalIdentitySamlAttributes
+
+  """
+  SCIM Identity attributes
+  """
+  scimIdentity: ExternalIdentityScimAttributes
+
+  """
+  User linked to this external identity. Will be NULL if this identity has not been claimed by an organization member.
+  """
+  user: User
+}
+
+"""
+An attribute for the External Identity attributes collection
+"""
+type ExternalIdentityAttribute {
+  """
+  The attribute metadata as JSON
+  """
+  metadata: String
+
+  """
+  The attribute name
+  """
+  name: String!
+
+  """
+  The attribute value
+  """
+  value: String!
+}
+
+"""
+The connection type for ExternalIdentity.
+"""
+type ExternalIdentityConnection {
+  """
+  A list of edges.
+  """
+  edges: [ExternalIdentityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ExternalIdentity]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ExternalIdentityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ExternalIdentity
+}
+
+"""
+SAML attributes for the External Identity
+"""
+type ExternalIdentitySamlAttributes {
+  """
+  SAML Identity attributes
+  """
+  attributes: [ExternalIdentityAttribute!]!
+
+  """
+  The emails associated with the SAML identity
+  """
+  emails: [UserEmailMetadata!]
+
+  """
+  Family name of the SAML identity
+  """
+  familyName: String
+
+  """
+  Given name of the SAML identity
+  """
+  givenName: String
+
+  """
+  The groups linked to this identity in IDP
+  """
+  groups: [String!]
+
+  """
+  The NameID of the SAML identity
+  """
+  nameId: String
+
+  """
+  The userName of the SAML identity
+  """
+  username: String
+}
+
+"""
+SCIM attributes for the External Identity
+"""
+type ExternalIdentityScimAttributes {
+  """
+  The emails associated with the SCIM identity
+  """
+  emails: [UserEmailMetadata!]
+
+  """
+  Family name of the SCIM identity
+  """
+  familyName: String
+
+  """
+  Given name of the SCIM identity
+  """
+  givenName: String
+
+  """
+  The groups linked to this identity in IDP
+  """
+  groups: [String!]
+
+  """
+  The userName of the SCIM identity
+  """
+  username: String
+}
+
+"""
+A command to add a file at the given path with the given contents as part of a
+commit.  Any existing file at that that path will be replaced.
+"""
+input FileAddition {
+  """
+  The base64 encoded contents of the file
+  """
+  contents: Base64String!
+
+  """
+  The path in the repository where the file will be located
+  """
+  path: String!
+}
+
+"""
+A description of a set of changes to a file tree to be made as part of
+a git commit, modeled as zero or more file `additions` and zero or more
+file `deletions`.
+
+Both fields are optional; omitting both will produce a commit with no
+file changes.
+
+`deletions` and `additions` describe changes to files identified
+by their path in the git tree using unix-style path separators, i.e.
+`/`.  The root of a git tree is an empty string, so paths are not
+slash-prefixed.
+
+`path` values must be unique across all `additions` and `deletions`
+provided.  Any duplication will result in a validation error.
+
+### Encoding
+
+File contents must be provided in full for each `FileAddition`.
+
+The `contents` of a `FileAddition` must be encoded using RFC 4648
+compliant base64, i.e. correct padding is required and no characters
+outside the standard alphabet may be used.  Invalid base64
+encoding will be rejected with a validation error.
+
+The encoded contents may be binary.
+
+For text files, no assumptions are made about the character encoding of
+the file contents (after base64 decoding).  No charset transcoding or
+line-ending normalization will be performed; it is the client's
+responsibility to manage the character encoding of files they provide.
+However, for maximum compatibility we recommend using UTF-8 encoding
+and ensuring that all files in a repository use a consistent
+line-ending convention (`\n` or `\r\n`), and that all files end
+with a newline.
+
+### Modeling file changes
+
+Each of the the five types of conceptual changes that can be made in a
+git commit can be described using the `FileChanges` type as follows:
+
+1. New file addition: create file `hello world\n` at path `docs/README.txt`:
+
+       {
+         "additions" [
+           {
+             "path": "docs/README.txt",
+             "contents": base64encode("hello world\n")
+           }
+         ]
+       }
+
+2. Existing file modification: change existing `docs/README.txt` to have new
+   content `new content here\n`:
+
+       {
+         "additions" [
+           {
+             "path": "docs/README.txt",
+             "contents": base64encode("new content here\n")
+           }
+         ]
+       }
+
+3. Existing file deletion: remove existing file `docs/README.txt`.
+   Note that the path is required to exist -- specifying a
+   path that does not exist on the given branch will abort the
+   commit and return an error.
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt"
+           }
+         ]
+       }
+
+
+4. File rename with no changes: rename `docs/README.txt` with
+   previous content `hello world\n` to the same content at
+   `newdocs/README.txt`:
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt",
+           }
+         ],
+         "additions" [
+           {
+             "path": "newdocs/README.txt",
+             "contents": base64encode("hello world\n")
+           }
+         ]
+       }
+
+
+5. File rename with changes: rename `docs/README.txt` with
+   previous content `hello world\n` to a file at path
+   `newdocs/README.txt` with content `new contents\n`:
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt",
+           }
+         ],
+         "additions" [
+           {
+             "path": "newdocs/README.txt",
+             "contents": base64encode("new contents\n")
+           }
+         ]
+       }
+"""
+input FileChanges {
+  """
+  File to add or change.
+  """
+  additions: [FileAddition!] = []
+
+  """
+  Files to delete.
+  """
+  deletions: [FileDeletion!] = []
+}
+
+"""
+A command to delete the file at the given path as part of a commit.
+"""
+input FileDeletion {
+  """
+  The path to delete
+  """
+  path: String!
+}
+
+"""
+The possible viewed states of a file .
+"""
+enum FileViewedState {
+  """
+  The file has new changes since last viewed.
+  """
+  DISMISSED
+
+  """
+  The file has not been marked as viewed.
+  """
+  UNVIEWED
+
+  """
+  The file has been marked as viewed.
+  """
+  VIEWED
+}
+
+"""
+Autogenerated input type of FollowOrganization
+"""
+input FollowOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the organization to follow.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of FollowOrganization
+"""
+type FollowOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that was followed.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of FollowUser
+"""
+input FollowUserInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the user to follow.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of FollowUser
+"""
+type FollowUserPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was followed.
+  """
+  user: User
+}
+
+"""
+The connection type for User.
+"""
+type FollowerConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The connection type for User.
+"""
+type FollowingConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A funding platform link for a repository.
+"""
+type FundingLink {
+  """
+  The funding platform this link is for.
+  """
+  platform: FundingPlatform!
+
+  """
+  The configured URL for this funding link.
+  """
+  url: URI!
+}
+
+"""
+The possible funding platforms for repository funding links.
+"""
+enum FundingPlatform {
+  """
+  Community Bridge funding platform.
+  """
+  COMMUNITY_BRIDGE
+
+  """
+  Custom funding platform.
+  """
+  CUSTOM
+
+  """
+  GitHub funding platform.
+  """
+  GITHUB
+
+  """
+  IssueHunt funding platform.
+  """
+  ISSUEHUNT
+
+  """
+  Ko-fi funding platform.
+  """
+  KO_FI
+
+  """
+  LFX Crowdfunding funding platform.
+  """
+  LFX_CROWDFUNDING
+
+  """
+  Liberapay funding platform.
+  """
+  LIBERAPAY
+
+  """
+  Open Collective funding platform.
+  """
+  OPEN_COLLECTIVE
+
+  """
+  Otechie funding platform.
+  """
+  OTECHIE
+
+  """
+  Patreon funding platform.
+  """
+  PATREON
+
+  """
+  Tidelift funding platform.
+  """
+  TIDELIFT
+}
+
+"""
+A generic hovercard context with a message and icon
+"""
+type GenericHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+}
+
+"""
+A Gist.
+"""
+type Gist implements Node & Starrable & UniformResourceLocatable {
+  """
+  A list of comments associated with the gist
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GistCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The gist description.
+  """
+  description: String
+
+  """
+  The files in this gist.
+  """
+  files(
+    """
+    The maximum number of files to return.
+    """
+    limit: Int = 10
+
+    """
+    The oid of the files to return
+    """
+    oid: GitObjectID
+  ): [GistFile]
+
+  """
+  A list of forks associated with the gist
+  """
+  forks(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for gists returned from the connection
+    """
+    orderBy: GistOrder
+  ): GistConnection!
+
+  """
+  The Node ID of the Gist object
+  """
+  id: ID!
+
+  """
+  Identifies if the gist is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Whether the gist is public or not.
+  """
+  isPublic: Boolean!
+
+  """
+  The gist name.
+  """
+  name: String!
+
+  """
+  The gist owner.
+  """
+  owner: RepositoryOwner
+
+  """
+  Identifies when the gist was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this Gist.
+  """
+  url: URI!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
+
+"""
+Represents a comment on an Gist.
+"""
+type GistComment implements Comment & Deletable & Minimizable & Node & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the gist.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the comment body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The associated gist.
+  """
+  gist: Gist!
+
+  """
+  The Node ID of the GistComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for GistComment.
+"""
+type GistCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [GistCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [GistComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GistCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: GistComment
+}
+
+"""
+The connection type for Gist.
+"""
+type GistConnection {
+  """
+  A list of edges.
+  """
+  edges: [GistEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Gist]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GistEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Gist
+}
+
+"""
+A file in a gist.
+"""
+type GistFile {
+  """
+  The file name encoded to remove characters that are invalid in URL paths.
+  """
+  encodedName: String
+
+  """
+  The gist file encoding.
+  """
+  encoding: String
+
+  """
+  The file extension from the file name.
+  """
+  extension: String
+
+  """
+  Indicates if this file is an image.
+  """
+  isImage: Boolean!
+
+  """
+  Whether the file's contents were truncated.
+  """
+  isTruncated: Boolean!
+
+  """
+  The programming language this file is written in.
+  """
+  language: Language
+
+  """
+  The gist file name.
+  """
+  name: String
+
+  """
+  The gist file size in bytes.
+  """
+  size: Int
+
+  """
+  UTF8 text data or null if the file is binary
+  """
+  text(
+    """
+    Optionally truncate the returned file to this length.
+    """
+    truncate: Int
+  ): String
+}
+
+"""
+Ordering options for gist connections
+"""
+input GistOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: GistOrderField!
+}
+
+"""
+Properties by which gist connections can be ordered.
+"""
+enum GistOrderField {
+  """
+  Order gists by creation time
+  """
+  CREATED_AT
+
+  """
+  Order gists by push time
+  """
+  PUSHED_AT
+
+  """
+  Order gists by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The privacy of a Gist
+"""
+enum GistPrivacy {
+  """
+  Gists that are public and secret
+  """
+  ALL
+
+  """
+  Public
+  """
+  PUBLIC
+
+  """
+  Secret
+  """
+  SECRET
+}
+
+"""
+Represents an actor in a Git commit (ie. an author or committer).
+"""
+type GitActor {
+  """
+  A URL pointing to the author's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The timestamp of the Git action (authoring or committing).
+  """
+  date: GitTimestamp
+
+  """
+  The email in the Git commit.
+  """
+  email: String
+
+  """
+  The name in the Git commit.
+  """
+  name: String
+
+  """
+  The GitHub user corresponding to the email field. Null if no such user exists.
+  """
+  user: User
+}
+
+"""
+The connection type for GitActor.
+"""
+type GitActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [GitActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [GitActor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GitActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: GitActor
+}
+
+"""
+Represents information about the GitHub instance.
+"""
+type GitHubMetadata {
+  """
+  Returns a String that's a SHA of `github-services`
+  """
+  gitHubServicesSha: GitObjectID!
+
+  """
+  IP addresses that users connect to for git operations
+  """
+  gitIpAddresses: [String!]
+
+  """
+  IP addresses that GitHub Enterprise Importer uses for outbound connections
+  """
+  githubEnterpriseImporterIpAddresses: [String!]
+
+  """
+  IP addresses that service hooks are sent from
+  """
+  hookIpAddresses: [String!]
+
+  """
+  IP addresses that the importer connects from
+  """
+  importerIpAddresses: [String!]
+
+  """
+  Whether or not users are verified
+  """
+  isPasswordAuthenticationVerifiable: Boolean!
+
+  """
+  IP addresses for GitHub Pages' A records
+  """
+  pagesIpAddresses: [String!]
+}
+
+"""
+Represents a Git object.
+"""
+interface GitObject {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the GitObject object
+  """
+  id: ID!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+}
+
+"""
+A Git object ID.
+"""
+scalar GitObjectID
+
+"""
+A fully qualified reference name (e.g. `refs/heads/master`).
+"""
+scalar GitRefname @preview(toggledBy: "update-refs-preview")
+
+"""
+Git SSH string
+"""
+scalar GitSSHRemote
+
+"""
+Information about a signature (GPG or S/MIME) on a Commit or Tag.
+"""
+interface GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if signature is valid and verified by
+  GitHub, otherwise represents reason why signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+The state of a Git signature.
+"""
+enum GitSignatureState {
+  """
+  The signing certificate or its chain could not be verified
+  """
+  BAD_CERT
+
+  """
+  Invalid email used for signing
+  """
+  BAD_EMAIL
+
+  """
+  Signing key expired
+  """
+  EXPIRED_KEY
+
+  """
+  Internal error - the GPG verification service misbehaved
+  """
+  GPGVERIFY_ERROR
+
+  """
+  Internal error - the GPG verification service is unavailable at the moment
+  """
+  GPGVERIFY_UNAVAILABLE
+
+  """
+  Invalid signature
+  """
+  INVALID
+
+  """
+  Malformed signature
+  """
+  MALFORMED_SIG
+
+  """
+  The usage flags for the key that signed this don't allow signing
+  """
+  NOT_SIGNING_KEY
+
+  """
+  Email used for signing not known to GitHub
+  """
+  NO_USER
+
+  """
+  Valid signature, though certificate revocation check failed
+  """
+  OCSP_ERROR
+
+  """
+  Valid signature, pending certificate revocation checking
+  """
+  OCSP_PENDING
+
+  """
+  One or more certificates in chain has been revoked
+  """
+  OCSP_REVOKED
+
+  """
+  Key used for signing not known to GitHub
+  """
+  UNKNOWN_KEY
+
+  """
+  Unknown signature type
+  """
+  UNKNOWN_SIG_TYPE
+
+  """
+  Unsigned
+  """
+  UNSIGNED
+
+  """
+  Email used for signing unverified on GitHub
+  """
+  UNVERIFIED_EMAIL
+
+  """
+  Valid signature and verified by GitHub
+  """
+  VALID
+}
+
+"""
+An ISO-8601 encoded date string. Unlike the DateTime type, GitTimestamp is not converted in UTC.
+"""
+scalar GitTimestamp
+
+"""
+Represents a GPG signature on a Commit or Tag.
+"""
+type GpgSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Hex-encoded ID of the key that signed this object.
+  """
+  keyId: String
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if signature is valid and verified by
+  GitHub, otherwise represents reason why signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Autogenerated input type of GrantEnterpriseOrganizationsMigratorRole
+"""
+input GrantEnterpriseOrganizationsMigratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise to which all organizations managed by it will be granted the migrator role.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user to grant the migrator role
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of GrantEnterpriseOrganizationsMigratorRole
+"""
+type GrantEnterpriseOrganizationsMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organizations that had the migrator role applied to for the given user.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationConnection
+}
+
+"""
+Autogenerated input type of GrantMigratorRole
+"""
+input GrantMigratorRoleInput {
+  """
+  The user login or Team slug to grant the migrator role.
+  """
+  actor: String!
+
+  """
+  Specifies the type of the actor, can be either USER or TEAM.
+  """
+  actorType: ActorType!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that the user/team belongs to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of GrantMigratorRole
+"""
+type GrantMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+A string containing HTML code.
+"""
+scalar HTML
+
+"""
+Represents a 'head_ref_deleted' event on a given pull request.
+"""
+type HeadRefDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the Ref associated with the `head_ref_deleted` event.
+  """
+  headRef: Ref
+
+  """
+  Identifies the name of the Ref associated with the `head_ref_deleted` event.
+  """
+  headRefName: String!
+
+  """
+  The Node ID of the HeadRefDeletedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'head_ref_force_pushed' event on a given pull request.
+"""
+type HeadRefForcePushedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the after commit SHA for the 'head_ref_force_pushed' event.
+  """
+  afterCommit: Commit
+
+  """
+  Identifies the before commit SHA for the 'head_ref_force_pushed' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the HeadRefForcePushedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the fully qualified ref name for the 'head_ref_force_pushed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents a 'head_ref_restored' event on a given pull request.
+"""
+type HeadRefRestoredEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the HeadRefRestoredEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Detail needed to display a hovercard for a user
+"""
+type Hovercard {
+  """
+  Each of the contexts for this hovercard
+  """
+  contexts: [HovercardContext!]!
+}
+
+"""
+An individual line of a hovercard
+"""
+interface HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+}
+
+"""
+The possible states in which authentication can be configured with an identity provider.
+"""
+enum IdentityProviderConfigurationState {
+  """
+  Authentication with an identity provider is configured but not enforced.
+  """
+  CONFIGURED
+
+  """
+  Authentication with an identity provider is configured and enforced.
+  """
+  ENFORCED
+
+  """
+  Authentication with an identity provider is not configured.
+  """
+  UNCONFIGURED
+}
+
+"""
+Autogenerated input type of ImportProject
+"""
+input ImportProjectInput {
+  """
+  The description of Project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A list of columns containing issues and pull requests.
+  """
+  columnImports: [ProjectColumnImport!]!
+
+  """
+  The name of Project.
+  """
+  name: String!
+
+  """
+  The name of the Organization or User to create the Project under.
+  """
+  ownerName: String!
+
+  """
+  Whether the Project is public or not.
+  """
+  public: Boolean = false
+}
+
+"""
+Autogenerated return type of ImportProject
+"""
+type ImportProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new Project!
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of InviteEnterpriseAdmin
+"""
+input InviteEnterpriseAdminInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The email of the person to invite as an administrator.
+  """
+  email: String
+
+  """
+  The ID of the enterprise to which you want to invite an administrator.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a user to invite as an administrator.
+  """
+  invitee: String
+
+  """
+  The role of the administrator.
+  """
+  role: EnterpriseAdministratorRole
+}
+
+"""
+Autogenerated return type of InviteEnterpriseAdmin
+"""
+type InviteEnterpriseAdminPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The created enterprise administrator invitation.
+  """
+  invitation: EnterpriseAdministratorInvitation
+}
+
+"""
+The possible values for the IP allow list enabled setting.
+"""
+enum IpAllowListEnabledSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+An IP address or range of addresses that is allowed to access an owner's resources.
+"""
+type IpAllowListEntry implements Node {
+  """
+  A single IP address or range of IP addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the IpAllowListEntry object
+  """
+  id: ID!
+
+  """
+  Whether the entry is currently active.
+  """
+  isActive: Boolean!
+
+  """
+  The name of the IP allow list entry.
+  """
+  name: String
+
+  """
+  The owner of the IP allow list entry.
+  """
+  owner: IpAllowListOwner!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for IpAllowListEntry.
+"""
+type IpAllowListEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [IpAllowListEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IpAllowListEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type IpAllowListEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IpAllowListEntry
+}
+
+"""
+Ordering options for IP allow list entry connections.
+"""
+input IpAllowListEntryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order IP allow list entries by.
+  """
+  field: IpAllowListEntryOrderField!
+}
+
+"""
+Properties by which IP allow list entry connections can be ordered.
+"""
+enum IpAllowListEntryOrderField {
+  """
+  Order IP allow list entries by the allow list value.
+  """
+  ALLOW_LIST_VALUE
+
+  """
+  Order IP allow list entries by creation time.
+  """
+  CREATED_AT
+}
+
+"""
+The possible values for the IP allow list configuration for installed GitHub Apps setting.
+"""
+enum IpAllowListForInstalledAppsEnabledSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+Types that can own an IP allow list.
+"""
+union IpAllowListOwner = App | Enterprise | Organization
+
+"""
+An Issue is a place to discuss ideas, enhancements, tasks, and bugs for a project.
+"""
+type Issue implements Assignable & Closable & Comment & Deletable & Labelable & Lockable & Node & ProjectV2Owner & Reactable & RepositoryNode & Subscribable & SubscribableThread & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the body of the issue.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The http path for this issue body
+  """
+  bodyResourcePath: URI!
+
+  """
+  Identifies the body of the issue rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The http URL for this issue body
+  """
+  bodyUrl: URI!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  A list of comments associated with the Issue.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The hovercard information for this issue
+  """
+  hovercard(
+    """
+    Whether or not to include notification contexts
+    """
+    includeNotificationContexts: Boolean = true
+  ): Hovercard!
+
+  """
+  The Node ID of the Issue object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Indicates whether or not this issue is currently pinned to the repository issues list
+  """
+  isPinned: Boolean
+
+  """
+  Is this issue read by the viewer
+  """
+  isReadByViewer: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Branches linked to this issue.
+  """
+  linkedBranches(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): LinkedBranchConnection!
+
+  """
+  `true` if the object is locked
+  """
+  locked: Boolean!
+
+  """
+  Identifies the milestone associated with the issue.
+  """
+  milestone: Milestone
+
+  """
+  Identifies the issue number.
+  """
+  number: Int!
+
+  """
+  A list of Users that are participating in the Issue conversation.
+  """
+  participants(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  List of project cards associated with this issue.
+  """
+  projectCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  List of project items associated with this issue.
+  """
+  projectItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Include archived items.
+    """
+    includeArchived: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the state of the issue.
+  """
+  state: IssueState!
+
+  """
+  Identifies the reason for the issue state.
+  """
+  stateReason: IssueStateReason
+
+  """
+  A list of events, comments, commits, etc. associated with the issue.
+  """
+  timeline(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows filtering timeline events by a `since` timestamp.
+    """
+    since: DateTime
+  ): IssueTimelineConnection!
+    @deprecated(reason: "`timeline` will be removed Use Issue.timelineItems instead. Removal on 2020-10-01 UTC.")
+
+  """
+  A list of events, comments, commits, etc. associated with the issue.
+  """
+  timelineItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter timeline items by type.
+    """
+    itemTypes: [IssueTimelineItemsItemType!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter timeline items by a `since` timestamp.
+    """
+    since: DateTime
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): IssueTimelineItemsConnection!
+
+  """
+  Identifies the issue title.
+  """
+  title: String!
+
+  """
+  Identifies the issue title rendered to HTML.
+  """
+  titleHTML: String!
+
+  """
+  A list of issues that track this issue
+  """
+  trackedInIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): IssueConnection!
+
+  """
+  A list of issues tracked inside the current issue
+  """
+  trackedIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): IssueConnection!
+
+  """
+  The number of tracked issues for this issue
+  """
+  trackedIssuesCount(
+    """
+    Limit the count to tracked issues with the specified states.
+    """
+    states: [TrackedIssueStates]
+  ): Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Identifies the viewer's thread subscription form action.
+  """
+  viewerThreadSubscriptionFormAction: ThreadSubscriptionFormAction
+
+  """
+  Identifies the viewer's thread subscription status.
+  """
+  viewerThreadSubscriptionStatus: ThreadSubscriptionState
+}
+
+"""
+The possible state reasons of a closed issue.
+"""
+enum IssueClosedStateReason {
+  """
+  An issue that has been closed as completed
+  """
+  COMPLETED
+
+  """
+  An issue that has been closed as not planned
+  """
+  NOT_PLANNED
+}
+
+"""
+Represents a comment on an Issue.
+"""
+type IssueComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The Node ID of the IssueComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  Identifies the issue associated with the comment.
+  """
+  issue: Issue!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  Returns the pull request associated with the comment, if this comment was made on a
+  pull request.
+  """
+  pullRequest: PullRequest
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue comment
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue comment
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for IssueComment.
+"""
+type IssueCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueComment
+}
+
+"""
+Ways in which lists of issue comments can be ordered upon return.
+"""
+input IssueCommentOrder {
+  """
+  The direction in which to order issue comments by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order issue comments by.
+  """
+  field: IssueCommentOrderField!
+}
+
+"""
+Properties by which issue comment connections can be ordered.
+"""
+enum IssueCommentOrderField {
+  """
+  Order issue comments by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The connection type for Issue.
+"""
+type IssueConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Issue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates issues opened by a user within one repository.
+"""
+type IssueContributionsByRepository {
+  """
+  The issue contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedIssueContributionConnection!
+
+  """
+  The repository in which the issues were opened.
+  """
+  repository: Repository!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Issue
+}
+
+"""
+Ways in which to filter lists of issues.
+"""
+input IssueFilters {
+  """
+  List issues assigned to given name. Pass in `null` for issues with no assigned
+  user, and `*` for issues assigned to any user.
+  """
+  assignee: String
+
+  """
+  List issues created by given name.
+  """
+  createdBy: String
+
+  """
+  List issues where the list of label names exist on the issue.
+  """
+  labels: [String!]
+
+  """
+  List issues where the given name is mentioned in the issue.
+  """
+  mentioned: String
+
+  """
+  List issues by given milestone argument. If a string representation of an
+  integer is passed, it should refer to a milestone by its database ID. Pass in
+  `null` for issues with no milestone, and `*` for issues that are assigned to any milestone.
+  """
+  milestone: String
+
+  """
+  List issues by given milestone argument. If a string representation of an
+  integer is passed, it should refer to a milestone by its number field. Pass in
+  `null` for issues with no milestone, and `*` for issues that are assigned to any milestone.
+  """
+  milestoneNumber: String
+
+  """
+  List issues that have been updated at or after the given date.
+  """
+  since: DateTime
+
+  """
+  List issues filtered by the list of states given.
+  """
+  states: [IssueState!]
+
+  """
+  List issues subscribed to by viewer.
+  """
+  viewerSubscribed: Boolean = false
+}
+
+"""
+Used for return value of Repository.issueOrPullRequest.
+"""
+union IssueOrPullRequest = Issue | PullRequest
+
+"""
+Ways in which lists of issues can be ordered upon return.
+"""
+input IssueOrder {
+  """
+  The direction in which to order issues by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order issues by.
+  """
+  field: IssueOrderField!
+}
+
+"""
+Properties by which issue connections can be ordered.
+"""
+enum IssueOrderField {
+  """
+  Order issues by comment count
+  """
+  COMMENTS
+
+  """
+  Order issues by creation time
+  """
+  CREATED_AT
+
+  """
+  Order issues by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The possible states of an issue.
+"""
+enum IssueState {
+  """
+  An issue that has been closed
+  """
+  CLOSED
+
+  """
+  An issue that is still open
+  """
+  OPEN
+}
+
+"""
+The possible state reasons of an issue.
+"""
+enum IssueStateReason {
+  """
+  An issue that has been closed as completed
+  """
+  COMPLETED
+
+  """
+  An issue that has been closed as not planned
+  """
+  NOT_PLANNED
+
+  """
+  An issue that has been reopened
+  """
+  REOPENED
+}
+
+"""
+A repository issue template.
+"""
+type IssueTemplate {
+  """
+  The template purpose.
+  """
+  about: String
+
+  """
+  The suggested assignees.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The suggested issue body.
+  """
+  body: String
+
+  """
+  The template filename.
+  """
+  filename: String!
+
+  """
+  The suggested issue labels
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The template name.
+  """
+  name: String!
+
+  """
+  The suggested issue title.
+  """
+  title: String
+}
+
+"""
+The connection type for IssueTimelineItem.
+"""
+type IssueTimelineConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueTimelineItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueTimelineItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An item in an issue timeline
+"""
+union IssueTimelineItem =
+    AssignedEvent
+  | ClosedEvent
+  | Commit
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MilestonedEvent
+  | ReferencedEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+An edge in a connection.
+"""
+type IssueTimelineItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueTimelineItem
+}
+
+"""
+An item in an issue timeline
+"""
+union IssueTimelineItems =
+    AddedToProjectEvent
+  | AssignedEvent
+  | ClosedEvent
+  | CommentDeletedEvent
+  | ConnectedEvent
+  | ConvertedNoteToIssueEvent
+  | ConvertedToDiscussionEvent
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DisconnectedEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MarkedAsDuplicateEvent
+  | MentionedEvent
+  | MilestonedEvent
+  | MovedColumnsInProjectEvent
+  | PinnedEvent
+  | ReferencedEvent
+  | RemovedFromProjectEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnmarkedAsDuplicateEvent
+  | UnpinnedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+The connection type for IssueTimelineItems.
+"""
+type IssueTimelineItemsConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueTimelineItemsEdge]
+
+  """
+  Identifies the count of items after applying `before` and `after` filters.
+  """
+  filteredCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueTimelineItems]
+
+  """
+  Identifies the count of items after applying `before`/`after` filters and `first`/`last`/`skip` slicing.
+  """
+  pageCount: Int!
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the date and time when the timeline was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueTimelineItemsEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueTimelineItems
+}
+
+"""
+The possible item types found in a timeline.
+"""
+enum IssueTimelineItemsItemType {
+  """
+  Represents a 'added_to_project' event on a given issue or pull request.
+  """
+  ADDED_TO_PROJECT_EVENT
+
+  """
+  Represents an 'assigned' event on any assignable object.
+  """
+  ASSIGNED_EVENT
+
+  """
+  Represents a 'closed' event on any `Closable`.
+  """
+  CLOSED_EVENT
+
+  """
+  Represents a 'comment_deleted' event on a given issue or pull request.
+  """
+  COMMENT_DELETED_EVENT
+
+  """
+  Represents a 'connected' event on a given issue or pull request.
+  """
+  CONNECTED_EVENT
+
+  """
+  Represents a 'converted_note_to_issue' event on a given issue or pull request.
+  """
+  CONVERTED_NOTE_TO_ISSUE_EVENT
+
+  """
+  Represents a 'converted_to_discussion' event on a given issue.
+  """
+  CONVERTED_TO_DISCUSSION_EVENT
+
+  """
+  Represents a mention made by one issue or pull request to another.
+  """
+  CROSS_REFERENCED_EVENT
+
+  """
+  Represents a 'demilestoned' event on a given issue or pull request.
+  """
+  DEMILESTONED_EVENT
+
+  """
+  Represents a 'disconnected' event on a given issue or pull request.
+  """
+  DISCONNECTED_EVENT
+
+  """
+  Represents a comment on an Issue.
+  """
+  ISSUE_COMMENT
+
+  """
+  Represents a 'labeled' event on a given issue or pull request.
+  """
+  LABELED_EVENT
+
+  """
+  Represents a 'locked' event on a given issue or pull request.
+  """
+  LOCKED_EVENT
+
+  """
+  Represents a 'marked_as_duplicate' event on a given issue or pull request.
+  """
+  MARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents a 'mentioned' event on a given issue or pull request.
+  """
+  MENTIONED_EVENT
+
+  """
+  Represents a 'milestoned' event on a given issue or pull request.
+  """
+  MILESTONED_EVENT
+
+  """
+  Represents a 'moved_columns_in_project' event on a given issue or pull request.
+  """
+  MOVED_COLUMNS_IN_PROJECT_EVENT
+
+  """
+  Represents a 'pinned' event on a given issue or pull request.
+  """
+  PINNED_EVENT
+
+  """
+  Represents a 'referenced' event on a given `ReferencedSubject`.
+  """
+  REFERENCED_EVENT
+
+  """
+  Represents a 'removed_from_project' event on a given issue or pull request.
+  """
+  REMOVED_FROM_PROJECT_EVENT
+
+  """
+  Represents a 'renamed' event on a given issue or pull request
+  """
+  RENAMED_TITLE_EVENT
+
+  """
+  Represents a 'reopened' event on any `Closable`.
+  """
+  REOPENED_EVENT
+
+  """
+  Represents a 'subscribed' event on a given `Subscribable`.
+  """
+  SUBSCRIBED_EVENT
+
+  """
+  Represents a 'transferred' event on a given issue or pull request.
+  """
+  TRANSFERRED_EVENT
+
+  """
+  Represents an 'unassigned' event on any assignable object.
+  """
+  UNASSIGNED_EVENT
+
+  """
+  Represents an 'unlabeled' event on a given issue or pull request.
+  """
+  UNLABELED_EVENT
+
+  """
+  Represents an 'unlocked' event on a given issue or pull request.
+  """
+  UNLOCKED_EVENT
+
+  """
+  Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+  """
+  UNMARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents an 'unpinned' event on a given issue or pull request.
+  """
+  UNPINNED_EVENT
+
+  """
+  Represents an 'unsubscribed' event on a given `Subscribable`.
+  """
+  UNSUBSCRIBED_EVENT
+
+  """
+  Represents a 'user_blocked' event on a given user.
+  """
+  USER_BLOCKED_EVENT
+}
+
+"""
+Represents a user signing up for a GitHub account.
+"""
+type JoinedGitHubContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+A label for categorizing Issues, Pull Requests, Milestones, or Discussions with a given Repository.
+"""
+type Label implements Node {
+  """
+  Identifies the label color.
+  """
+  color: String!
+
+  """
+  Identifies the date and time when the label was created.
+  """
+  createdAt: DateTime
+
+  """
+  A brief description of this label.
+  """
+  description: String
+
+  """
+  The Node ID of the Label object
+  """
+  id: ID!
+
+  """
+  Indicates whether or not this is a default label.
+  """
+  isDefault: Boolean!
+
+  """
+  A list of issues associated with this label.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the issues by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Identifies the label name.
+  """
+  name: String!
+
+  """
+  A list of pull requests associated with this label.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  The repository associated with this label.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this label.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the label was last updated.
+  """
+  updatedAt: DateTime
+
+  """
+  The HTTP URL for this label.
+  """
+  url: URI!
+}
+
+"""
+The connection type for Label.
+"""
+type LabelConnection {
+  """
+  A list of edges.
+  """
+  edges: [LabelEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Label]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type LabelEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Label
+}
+
+"""
+Ways in which lists of labels can be ordered upon return.
+"""
+input LabelOrder {
+  """
+  The direction in which to order labels by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order labels by.
+  """
+  field: LabelOrderField!
+}
+
+"""
+Properties by which label connections can be ordered.
+"""
+enum LabelOrderField {
+  """
+  Order labels by creation time
+  """
+  CREATED_AT
+
+  """
+  Order labels by name
+  """
+  NAME
+}
+
+"""
+An object that can have labels assigned to it.
+"""
+interface Labelable {
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+}
+
+"""
+Represents a 'labeled' event on a given issue or pull request.
+"""
+type LabeledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the LabeledEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the label associated with the 'labeled' event.
+  """
+  label: Label!
+
+  """
+  Identifies the `Labelable` associated with the event.
+  """
+  labelable: Labelable!
+}
+
+"""
+Represents a given language found in repositories.
+"""
+type Language implements Node {
+  """
+  The color defined for the current language.
+  """
+  color: String
+
+  """
+  The Node ID of the Language object
+  """
+  id: ID!
+
+  """
+  The name of the current language.
+  """
+  name: String!
+}
+
+"""
+A list of languages associated with the parent.
+"""
+type LanguageConnection {
+  """
+  A list of edges.
+  """
+  edges: [LanguageEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Language]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total size in bytes of files written in that language.
+  """
+  totalSize: Int!
+}
+
+"""
+Represents the language of a repository.
+"""
+type LanguageEdge {
+  cursor: String!
+  node: Language!
+
+  """
+  The number of bytes of code written in the language.
+  """
+  size: Int!
+}
+
+"""
+Ordering options for language connections.
+"""
+input LanguageOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order languages by.
+  """
+  field: LanguageOrderField!
+}
+
+"""
+Properties by which language connections can be ordered.
+"""
+enum LanguageOrderField {
+  """
+  Order languages by the size of all files containing the language
+  """
+  SIZE
+}
+
+"""
+A repository's open source license
+"""
+type License implements Node {
+  """
+  The full text of the license
+  """
+  body: String!
+
+  """
+  The conditions set by the license
+  """
+  conditions: [LicenseRule]!
+
+  """
+  A human-readable description of the license
+  """
+  description: String
+
+  """
+  Whether the license should be featured
+  """
+  featured: Boolean!
+
+  """
+  Whether the license should be displayed in license pickers
+  """
+  hidden: Boolean!
+
+  """
+  The Node ID of the License object
+  """
+  id: ID!
+
+  """
+  Instructions on how to implement the license
+  """
+  implementation: String
+
+  """
+  The lowercased SPDX ID of the license
+  """
+  key: String!
+
+  """
+  The limitations set by the license
+  """
+  limitations: [LicenseRule]!
+
+  """
+  The license full name specified by <https://spdx.org/licenses>
+  """
+  name: String!
+
+  """
+  Customary short name if applicable (e.g., GPLv3)
+  """
+  nickname: String
+
+  """
+  The permissions set by the license
+  """
+  permissions: [LicenseRule]!
+
+  """
+  Whether the license is a pseudo-license placeholder (e.g., other, no-license)
+  """
+  pseudoLicense: Boolean!
+
+  """
+  Short identifier specified by <https://spdx.org/licenses>
+  """
+  spdxId: String
+
+  """
+  URL to the license on <https://choosealicense.com>
+  """
+  url: URI
+}
+
+"""
+Describes a License's conditions, permissions, and limitations
+"""
+type LicenseRule {
+  """
+  A description of the rule
+  """
+  description: String!
+
+  """
+  The machine-readable rule key
+  """
+  key: String!
+
+  """
+  The human-readable rule label
+  """
+  label: String!
+}
+
+"""
+Autogenerated input type of LinkProjectV2ToRepository
+"""
+input LinkProjectV2ToRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to link to the repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the repository to link to the project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of LinkProjectV2ToRepository
+"""
+type LinkProjectV2ToRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the project is linked to.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of LinkProjectV2ToTeam
+"""
+input LinkProjectV2ToTeamInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to link to the team.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the team to link to the project.
+  """
+  teamId: ID! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of LinkProjectV2ToTeam
+"""
+type LinkProjectV2ToTeamPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team the project is linked to
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of LinkRepositoryToProject
+"""
+input LinkRepositoryToProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to link to a Repository
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The ID of the Repository to link to a Project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of LinkRepositoryToProject
+"""
+type LinkRepositoryToProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The linked Project.
+  """
+  project: Project
+
+  """
+  The linked Repository.
+  """
+  repository: Repository
+}
+
+"""
+A branch linked to an issue.
+"""
+type LinkedBranch implements Node {
+  """
+  The Node ID of the LinkedBranch object
+  """
+  id: ID!
+
+  """
+  The branch's ref.
+  """
+  ref: Ref
+}
+
+"""
+The connection type for LinkedBranch.
+"""
+type LinkedBranchConnection {
+  """
+  A list of edges.
+  """
+  edges: [LinkedBranchEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [LinkedBranch]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type LinkedBranchEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: LinkedBranch
+}
+
+"""
+Autogenerated input type of LockLockable
+"""
+input LockLockableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A reason for why the item will be locked.
+  """
+  lockReason: LockReason
+
+  """
+  ID of the item to be locked.
+  """
+  lockableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Lockable")
+}
+
+"""
+Autogenerated return type of LockLockable
+"""
+type LockLockablePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was locked.
+  """
+  lockedRecord: Lockable
+}
+
+"""
+The possible reasons that an issue or pull request was locked.
+"""
+enum LockReason {
+  """
+  The issue or pull request was locked because the conversation was off-topic.
+  """
+  OFF_TOPIC
+
+  """
+  The issue or pull request was locked because the conversation was resolved.
+  """
+  RESOLVED
+
+  """
+  The issue or pull request was locked because the conversation was spam.
+  """
+  SPAM
+
+  """
+  The issue or pull request was locked because the conversation was too heated.
+  """
+  TOO_HEATED
+}
+
+"""
+An object that can be locked.
+"""
+interface Lockable {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  `true` if the object is locked
+  """
+  locked: Boolean!
+}
+
+"""
+Represents a 'locked' event on a given issue or pull request.
+"""
+type LockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the LockedEvent object
+  """
+  id: ID!
+
+  """
+  Reason that the conversation was locked (optional).
+  """
+  lockReason: LockReason
+
+  """
+  Object that was locked.
+  """
+  lockable: Lockable!
+}
+
+"""
+A placeholder user for attribution of imported data on GitHub.
+"""
+type Mannequin implements Actor & Node & UniformResourceLocatable {
+  """
+  A URL pointing to the GitHub App's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The user that has claimed the data attributed to this mannequin.
+  """
+  claimant: User
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The mannequin's email on the source instance.
+  """
+  email: String
+
+  """
+  The Node ID of the Mannequin object
+  """
+  id: ID!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The URL to this resource.
+  """
+  url: URI!
+}
+
+"""
+The connection type for Mannequin.
+"""
+type MannequinConnection {
+  """
+  A list of edges.
+  """
+  edges: [MannequinEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Mannequin]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a mannequin.
+"""
+type MannequinEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Mannequin
+}
+
+"""
+Ordering options for mannequins.
+"""
+input MannequinOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order mannequins by.
+  """
+  field: MannequinOrderField!
+}
+
+"""
+Properties by which mannequins can be ordered.
+"""
+enum MannequinOrderField {
+  """
+  Order mannequins by when they were created.
+  """
+  CREATED_AT
+
+  """
+  Order mannequins alphabetically by their source login.
+  """
+  LOGIN
+}
+
+"""
+Autogenerated input type of MarkDiscussionCommentAsAnswer
+"""
+input MarkDiscussionCommentAsAnswerInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to mark as an answer.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of MarkDiscussionCommentAsAnswer
+"""
+type MarkDiscussionCommentAsAnswerPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that includes the chosen comment.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of MarkFileAsViewed
+"""
+input MarkFileAsViewedInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The path of the file to mark as viewed
+  """
+  path: String!
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MarkFileAsViewed
+"""
+type MarkFileAsViewedPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of MarkProjectV2AsTemplate
+"""
+input MarkProjectV2AsTemplateInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to mark as a template.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of MarkProjectV2AsTemplate
+"""
+type MarkProjectV2AsTemplatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of MarkPullRequestReadyForReview
+"""
+input MarkPullRequestReadyForReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be marked as ready for review.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MarkPullRequestReadyForReview
+"""
+type MarkPullRequestReadyForReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that is ready for review.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'marked_as_duplicate' event on a given issue or pull request.
+"""
+type MarkedAsDuplicateEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  The authoritative issue or pull request which has been duplicated by another.
+  """
+  canonical: IssueOrPullRequest
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The issue or pull request which has been marked as a duplicate of another.
+  """
+  duplicate: IssueOrPullRequest
+
+  """
+  The Node ID of the MarkedAsDuplicateEvent object
+  """
+  id: ID!
+
+  """
+  Canonical and duplicate belong to different repositories.
+  """
+  isCrossRepository: Boolean!
+}
+
+"""
+A public description of a Marketplace category.
+"""
+type MarketplaceCategory implements Node {
+  """
+  The category's description.
+  """
+  description: String
+
+  """
+  The technical description of how apps listed in this category work with GitHub.
+  """
+  howItWorks: String
+
+  """
+  The Node ID of the MarketplaceCategory object
+  """
+  id: ID!
+
+  """
+  The category's name.
+  """
+  name: String!
+
+  """
+  How many Marketplace listings have this as their primary category.
+  """
+  primaryListingCount: Int!
+
+  """
+  The HTTP path for this Marketplace category.
+  """
+  resourcePath: URI!
+
+  """
+  How many Marketplace listings have this as their secondary category.
+  """
+  secondaryListingCount: Int!
+
+  """
+  The short name of the category used in its URL.
+  """
+  slug: String!
+
+  """
+  The HTTP URL for this Marketplace category.
+  """
+  url: URI!
+}
+
+"""
+A listing in the GitHub integration marketplace.
+"""
+type MarketplaceListing implements Node {
+  """
+  The GitHub App this listing represents.
+  """
+  app: App
+
+  """
+  URL to the listing owner's company site.
+  """
+  companyUrl: URI
+
+  """
+  The HTTP path for configuring access to the listing's integration or OAuth app
+  """
+  configurationResourcePath: URI!
+
+  """
+  The HTTP URL for configuring access to the listing's integration or OAuth app
+  """
+  configurationUrl: URI!
+
+  """
+  URL to the listing's documentation.
+  """
+  documentationUrl: URI
+
+  """
+  The listing's detailed description.
+  """
+  extendedDescription: String
+
+  """
+  The listing's detailed description rendered to HTML.
+  """
+  extendedDescriptionHTML: HTML!
+
+  """
+  The listing's introductory description.
+  """
+  fullDescription: String!
+
+  """
+  The listing's introductory description rendered to HTML.
+  """
+  fullDescriptionHTML: HTML!
+
+  """
+  Does this listing have any plans with a free trial?
+  """
+  hasPublishedFreeTrialPlans: Boolean!
+
+  """
+  Does this listing have a terms of service link?
+  """
+  hasTermsOfService: Boolean!
+
+  """
+  Whether the creator of the app is a verified org
+  """
+  hasVerifiedOwner: Boolean!
+
+  """
+  A technical description of how this app works with GitHub.
+  """
+  howItWorks: String
+
+  """
+  The listing's technical description rendered to HTML.
+  """
+  howItWorksHTML: HTML!
+
+  """
+  The Node ID of the MarketplaceListing object
+  """
+  id: ID!
+
+  """
+  URL to install the product to the viewer's account or organization.
+  """
+  installationUrl: URI
+
+  """
+  Whether this listing's app has been installed for the current viewer
+  """
+  installedForViewer: Boolean!
+
+  """
+  Whether this listing has been removed from the Marketplace.
+  """
+  isArchived: Boolean!
+
+  """
+  Whether this listing is still an editable draft that has not been submitted
+  for review and is not publicly visible in the Marketplace.
+  """
+  isDraft: Boolean!
+
+  """
+  Whether the product this listing represents is available as part of a paid plan.
+  """
+  isPaid: Boolean!
+
+  """
+  Whether this listing has been approved for display in the Marketplace.
+  """
+  isPublic: Boolean!
+
+  """
+  Whether this listing has been rejected by GitHub for display in the Marketplace.
+  """
+  isRejected: Boolean!
+
+  """
+  Whether this listing has been approved for unverified display in the Marketplace.
+  """
+  isUnverified: Boolean!
+
+  """
+  Whether this draft listing has been submitted for review for approval to be unverified in the Marketplace.
+  """
+  isUnverifiedPending: Boolean!
+
+  """
+  Whether this draft listing has been submitted for review from GitHub for approval to be verified in the Marketplace.
+  """
+  isVerificationPendingFromDraft: Boolean!
+
+  """
+  Whether this unverified listing has been submitted for review from GitHub for approval to be verified in the Marketplace.
+  """
+  isVerificationPendingFromUnverified: Boolean!
+
+  """
+  Whether this listing has been approved for verified display in the Marketplace.
+  """
+  isVerified: Boolean!
+
+  """
+  The hex color code, without the leading '#', for the logo background.
+  """
+  logoBackgroundColor: String!
+
+  """
+  URL for the listing's logo image.
+  """
+  logoUrl(
+    """
+    The size in pixels of the resulting square image.
+    """
+    size: Int = 400
+  ): URI
+
+  """
+  The listing's full name.
+  """
+  name: String!
+
+  """
+  The listing's very short description without a trailing period or ampersands.
+  """
+  normalizedShortDescription: String!
+
+  """
+  URL to the listing's detailed pricing.
+  """
+  pricingUrl: URI
+
+  """
+  The category that best describes the listing.
+  """
+  primaryCategory: MarketplaceCategory!
+
+  """
+  URL to the listing's privacy policy, may return an empty string for listings that do not require a privacy policy URL.
+  """
+  privacyPolicyUrl: URI!
+
+  """
+  The HTTP path for the Marketplace listing.
+  """
+  resourcePath: URI!
+
+  """
+  The URLs for the listing's screenshots.
+  """
+  screenshotUrls: [String]!
+
+  """
+  An alternate category that describes the listing.
+  """
+  secondaryCategory: MarketplaceCategory
+
+  """
+  The listing's very short description.
+  """
+  shortDescription: String!
+
+  """
+  The short name of the listing used in its URL.
+  """
+  slug: String!
+
+  """
+  URL to the listing's status page.
+  """
+  statusUrl: URI
+
+  """
+  An email address for support for this listing's app.
+  """
+  supportEmail: String
+
+  """
+  Either a URL or an email address for support for this listing's app, may
+  return an empty string for listings that do not require a support URL.
+  """
+  supportUrl: URI!
+
+  """
+  URL to the listing's terms of service.
+  """
+  termsOfServiceUrl: URI
+
+  """
+  The HTTP URL for the Marketplace listing.
+  """
+  url: URI!
+
+  """
+  Can the current viewer add plans for this Marketplace listing.
+  """
+  viewerCanAddPlans: Boolean!
+
+  """
+  Can the current viewer approve this Marketplace listing.
+  """
+  viewerCanApprove: Boolean!
+
+  """
+  Can the current viewer delist this Marketplace listing.
+  """
+  viewerCanDelist: Boolean!
+
+  """
+  Can the current viewer edit this Marketplace listing.
+  """
+  viewerCanEdit: Boolean!
+
+  """
+  Can the current viewer edit the primary and secondary category of this
+  Marketplace listing.
+  """
+  viewerCanEditCategories: Boolean!
+
+  """
+  Can the current viewer edit the plans for this Marketplace listing.
+  """
+  viewerCanEditPlans: Boolean!
+
+  """
+  Can the current viewer return this Marketplace listing to draft state
+  so it becomes editable again.
+  """
+  viewerCanRedraft: Boolean!
+
+  """
+  Can the current viewer reject this Marketplace listing by returning it to
+  an editable draft state or rejecting it entirely.
+  """
+  viewerCanReject: Boolean!
+
+  """
+  Can the current viewer request this listing be reviewed for display in
+  the Marketplace as verified.
+  """
+  viewerCanRequestApproval: Boolean!
+
+  """
+  Indicates whether the current user has an active subscription to this Marketplace listing.
+  """
+  viewerHasPurchased: Boolean!
+
+  """
+  Indicates if the current user has purchased a subscription to this Marketplace listing
+  for all of the organizations the user owns.
+  """
+  viewerHasPurchasedForAllOrganizations: Boolean!
+
+  """
+  Does the current viewer role allow them to administer this Marketplace listing.
+  """
+  viewerIsListingAdmin: Boolean!
+}
+
+"""
+Look up Marketplace Listings
+"""
+type MarketplaceListingConnection {
+  """
+  A list of edges.
+  """
+  edges: [MarketplaceListingEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [MarketplaceListing]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MarketplaceListingEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: MarketplaceListing
+}
+
+"""
+Represents a member feature request notification
+"""
+type MemberFeatureRequestNotification implements Node {
+  """
+  Represents member feature request body containing organization name and the number of feature requests
+  """
+  body: String!
+
+  """
+  The Node ID of the MemberFeatureRequestNotification object
+  """
+  id: ID!
+
+  """
+  Represents member feature request notification title
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Entities that have members who can set status messages.
+"""
+interface MemberStatusable {
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+}
+
+"""
+Audit log entry for a members_can_delete_repos.clear event.
+"""
+type MembersCanDeleteReposClearAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposClearAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a members_can_delete_repos.disable event.
+"""
+type MembersCanDeleteReposDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a members_can_delete_repos.enable event.
+"""
+type MembersCanDeleteReposEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Represents a 'mentioned' event on a given issue or pull request.
+"""
+type MentionedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the MentionedEvent object
+  """
+  id: ID!
+}
+
+"""
+Autogenerated input type of MergeBranch
+"""
+input MergeBranchInput {
+  """
+  The email address to associate with this commit.
+  """
+  authorEmail: String
+
+  """
+  The name of the base branch that the provided head will be merged into.
+  """
+  base: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Message to use for the merge commit. If omitted, a default will be used.
+  """
+  commitMessage: String
+
+  """
+  The head to merge into the base branch. This can be a branch name or a commit GitObjectID.
+  """
+  head: String!
+
+  """
+  The Node ID of the Repository containing the base branch that will be modified.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of MergeBranch
+"""
+type MergeBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The resulting merge Commit.
+  """
+  mergeCommit: Commit
+}
+
+"""
+The possible default commit messages for merges.
+"""
+enum MergeCommitMessage {
+  """
+  Default to a blank commit message.
+  """
+  BLANK
+
+  """
+  Default to the pull request's body.
+  """
+  PR_BODY
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+The possible default commit titles for merges.
+"""
+enum MergeCommitTitle {
+  """
+  Default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name).
+  """
+  MERGE_MESSAGE
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+Autogenerated input type of MergePullRequest
+"""
+input MergePullRequestInput {
+  """
+  The email address to associate with this merge.
+  """
+  authorEmail: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Commit body to use for the merge commit; if omitted, a default message will be used
+  """
+  commitBody: String
+
+  """
+  Commit headline to use for the merge commit; if omitted, a default message will be used.
+  """
+  commitHeadline: String
+
+  """
+  OID that the pull request head ref must match to allow merge; if omitted, no check is performed.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The merge method to use. If omitted, defaults to 'MERGE'
+  """
+  mergeMethod: PullRequestMergeMethod = MERGE
+
+  """
+  ID of the pull request to be merged.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MergePullRequest
+"""
+type MergePullRequestPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was merged.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+The queue of pull request entries to be merged into a protected branch in a repository.
+"""
+type MergeQueue implements Node {
+  """
+  The configuration for this merge queue
+  """
+  configuration: MergeQueueConfiguration
+
+  """
+  The entries in the queue
+  """
+  entries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): MergeQueueEntryConnection
+
+  """
+  The Node ID of the MergeQueue object
+  """
+  id: ID!
+
+  """
+  The estimated time in seconds until a newly added entry would be merged
+  """
+  nextEntryEstimatedTimeToMerge: Int
+
+  """
+  The repository this merge queue belongs to
+  """
+  repository: Repository
+
+  """
+  The HTTP path for this merge queue
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this merge queue
+  """
+  url: URI!
+}
+
+"""
+Configuration for a MergeQueue
+"""
+type MergeQueueConfiguration {
+  """
+  The amount of time in minutes to wait for a check response before considering it a failure.
+  """
+  checkResponseTimeout: Int
+
+  """
+  The maximum number of entries to build at once.
+  """
+  maximumEntriesToBuild: Int
+
+  """
+  The maximum number of entries to merge at once.
+  """
+  maximumEntriesToMerge: Int
+
+  """
+  The merge method to use for this queue.
+  """
+  mergeMethod: PullRequestMergeMethod
+
+  """
+  The strategy to use when merging entries.
+  """
+  mergingStrategy: MergeQueueMergingStrategy
+
+  """
+  The minimum number of entries required to merge at once.
+  """
+  minimumEntriesToMerge: Int
+
+  """
+  The amount of time in minutes to wait before ignoring the minimum number of
+  entries in the queue requirement and merging a collection of entries
+  """
+  minimumEntriesToMergeWaitTime: Int
+}
+
+"""
+Entries in a MergeQueue
+"""
+type MergeQueueEntry implements Node {
+  """
+  The base commit for this entry
+  """
+  baseCommit: Commit
+
+  """
+  The date and time this entry was added to the merge queue
+  """
+  enqueuedAt: DateTime!
+
+  """
+  The actor that enqueued this entry
+  """
+  enqueuer: Actor!
+
+  """
+  The estimated time in seconds until this entry will be merged
+  """
+  estimatedTimeToMerge: Int
+
+  """
+  The head commit for this entry
+  """
+  headCommit: Commit
+
+  """
+  The Node ID of the MergeQueueEntry object
+  """
+  id: ID!
+
+  """
+  Whether this pull request should jump the queue
+  """
+  jump: Boolean!
+
+  """
+  The merge queue that this entry belongs to
+  """
+  mergeQueue: MergeQueue
+
+  """
+  The position of this entry in the queue
+  """
+  position: Int!
+
+  """
+  The pull request that will be added to a merge group
+  """
+  pullRequest: PullRequest
+
+  """
+  Does this pull request need to be deployed on its own
+  """
+  solo: Boolean!
+
+  """
+  The state of this entry in the queue
+  """
+  state: MergeQueueEntryState!
+}
+
+"""
+The connection type for MergeQueueEntry.
+"""
+type MergeQueueEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [MergeQueueEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [MergeQueueEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MergeQueueEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: MergeQueueEntry
+}
+
+"""
+The possible states for a merge queue entry.
+"""
+enum MergeQueueEntryState {
+  """
+  The entry is currently waiting for checks to pass.
+  """
+  AWAITING_CHECKS
+
+  """
+  The entry is currently locked.
+  """
+  LOCKED
+
+  """
+  The entry is currently mergeable.
+  """
+  MERGEABLE
+
+  """
+  The entry is currently queued.
+  """
+  QUEUED
+
+  """
+  The entry is currently unmergeable.
+  """
+  UNMERGEABLE
+}
+
+"""
+The possible merging strategies for a merge queue.
+"""
+enum MergeQueueMergingStrategy {
+  """
+  Entries only allowed to merge if they are passing.
+  """
+  ALLGREEN
+
+  """
+  Failing Entries are allowed to merge if they are with a passing entry.
+  """
+  HEADGREEN
+}
+
+"""
+Detailed status information about a pull request merge.
+"""
+enum MergeStateStatus {
+  """
+  The head ref is out of date.
+  """
+  BEHIND
+
+  """
+  The merge is blocked.
+  """
+  BLOCKED
+
+  """
+  Mergeable and passing commit status.
+  """
+  CLEAN
+
+  """
+  The merge commit cannot be cleanly created.
+  """
+  DIRTY
+
+  """
+  The merge is blocked due to the pull request being a draft.
+  """
+  DRAFT
+    @deprecated(
+      reason: "DRAFT state will be removed from this enum and `isDraft` should be used instead Use PullRequest.isDraft instead. Removal on 2021-01-01 UTC."
+    )
+
+  """
+  Mergeable with passing commit status and pre-receive hooks.
+  """
+  HAS_HOOKS
+
+  """
+  The state cannot currently be determined.
+  """
+  UNKNOWN
+
+  """
+  Mergeable with non-passing commit status.
+  """
+  UNSTABLE
+}
+
+"""
+Whether or not a PullRequest can be merged.
+"""
+enum MergeableState {
+  """
+  The pull request cannot be merged due to merge conflicts.
+  """
+  CONFLICTING
+
+  """
+  The pull request can be merged.
+  """
+  MERGEABLE
+
+  """
+  The mergeability of the pull request is still being calculated.
+  """
+  UNKNOWN
+}
+
+"""
+Represents a 'merged' event on a given pull request.
+"""
+type MergedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the commit associated with the `merge` event.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the MergedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the Ref associated with the `merge` event.
+  """
+  mergeRef: Ref
+
+  """
+  Identifies the name of the Ref associated with the `merge` event.
+  """
+  mergeRefName: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this merged event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this merged event.
+  """
+  url: URI!
+}
+
+"""
+Represents a GitHub Enterprise Importer (GEI) migration.
+"""
+interface Migration {
+  """
+  The migration flag to continue on error.
+  """
+  continueOnError: Boolean!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the Migration object
+  """
+  id: ID!
+
+  """
+  The URL for the migration log (expires 1 day after migration completes).
+  """
+  migrationLogUrl: URI
+
+  """
+  The migration source.
+  """
+  migrationSource: MigrationSource!
+
+  """
+  The target repository name.
+  """
+  repositoryName: String!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  sourceUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: MigrationState!
+
+  """
+  The number of warnings encountered for this migration. To review the warnings,
+  check the [Migration Log](https://docs.github.com/en/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/accessing-your-migration-logs-for-github-enterprise-importer).
+  """
+  warningsCount: Int!
+}
+
+"""
+A GitHub Enterprise Importer (GEI) migration source.
+"""
+type MigrationSource implements Node {
+  """
+  The Node ID of the MigrationSource object
+  """
+  id: ID!
+
+  """
+  The migration source name.
+  """
+  name: String!
+
+  """
+  The migration source type.
+  """
+  type: MigrationSourceType!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  url: URI!
+}
+
+"""
+Represents the different GitHub Enterprise Importer (GEI) migration sources.
+"""
+enum MigrationSourceType {
+  """
+  An Azure DevOps migration source.
+  """
+  AZURE_DEVOPS
+
+  """
+  A Bitbucket Server migration source.
+  """
+  BITBUCKET_SERVER
+
+  """
+  A GitHub Migration API source.
+  """
+  GITHUB_ARCHIVE
+}
+
+"""
+The GitHub Enterprise Importer (GEI) migration state.
+"""
+enum MigrationState {
+  """
+  The migration has failed.
+  """
+  FAILED
+
+  """
+  The migration has invalid credentials.
+  """
+  FAILED_VALIDATION
+
+  """
+  The migration is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The migration has not started.
+  """
+  NOT_STARTED
+
+  """
+  The migration needs to have its credentials validated.
+  """
+  PENDING_VALIDATION
+
+  """
+  The migration has been queued.
+  """
+  QUEUED
+
+  """
+  The migration has succeeded.
+  """
+  SUCCEEDED
+}
+
+"""
+Represents a Milestone object on a given repository.
+"""
+type Milestone implements Closable & Node & UniformResourceLocatable {
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who created the milestone.
+  """
+  creator: Actor
+
+  """
+  Identifies the description of the milestone.
+  """
+  description: String
+
+  """
+  Identifies the due date of the milestone.
+  """
+  dueOn: DateTime
+
+  """
+  The Node ID of the Milestone object
+  """
+  id: ID!
+
+  """
+  A list of issues associated with the milestone.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Identifies the number of the milestone.
+  """
+  number: Int!
+
+  """
+  Identifies the percentage complete for the milestone
+  """
+  progressPercentage: Float!
+
+  """
+  A list of pull requests associated with the milestone.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  The repository associated with this milestone.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this milestone
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the state of the milestone.
+  """
+  state: MilestoneState!
+
+  """
+  Identifies the title of the milestone.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this milestone
+  """
+  url: URI!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+}
+
+"""
+The connection type for Milestone.
+"""
+type MilestoneConnection {
+  """
+  A list of edges.
+  """
+  edges: [MilestoneEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Milestone]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MilestoneEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Milestone
+}
+
+"""
+Types that can be inside a Milestone.
+"""
+union MilestoneItem = Issue | PullRequest
+
+"""
+Ordering options for milestone connections.
+"""
+input MilestoneOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order milestones by.
+  """
+  field: MilestoneOrderField!
+}
+
+"""
+Properties by which milestone connections can be ordered.
+"""
+enum MilestoneOrderField {
+  """
+  Order milestones by when they were created.
+  """
+  CREATED_AT
+
+  """
+  Order milestones by when they are due.
+  """
+  DUE_DATE
+
+  """
+  Order milestones by their number.
+  """
+  NUMBER
+
+  """
+  Order milestones by when they were last updated.
+  """
+  UPDATED_AT
+}
+
+"""
+The possible states of a milestone.
+"""
+enum MilestoneState {
+  """
+  A milestone that has been closed.
+  """
+  CLOSED
+
+  """
+  A milestone that is still open.
+  """
+  OPEN
+}
+
+"""
+Represents a 'milestoned' event on a given issue or pull request.
+"""
+type MilestonedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the MilestonedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the milestone title associated with the 'milestoned' event.
+  """
+  milestoneTitle: String!
+
+  """
+  Object referenced by event.
+  """
+  subject: MilestoneItem!
+}
+
+"""
+Entities that can be minimized.
+"""
+interface Minimizable {
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+}
+
+"""
+Autogenerated input type of MinimizeComment
+"""
+input MinimizeCommentInput {
+  """
+  The classification of comment
+  """
+  classifier: ReportedContentClassifiers!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "DiscussionComment"
+        "GistComment"
+        "IssueComment"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+      ]
+      abstractType: "Minimizable"
+    )
+}
+
+"""
+Autogenerated return type of MinimizeComment
+"""
+type MinimizeCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The comment that was minimized.
+  """
+  minimizedComment: Minimizable
+}
+
+"""
+Autogenerated input type of MoveProjectCard
+"""
+input MoveProjectCardInput {
+  """
+  Place the new card after the card with this id. Pass null to place it at the top.
+  """
+  afterCardId: ID @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  The id of the card to move.
+  """
+  cardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to move it into.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of MoveProjectCard
+"""
+type MoveProjectCardPayload {
+  """
+  The new edge of the moved card.
+  """
+  cardEdge: ProjectCardEdge
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of MoveProjectColumn
+"""
+input MoveProjectColumnInput {
+  """
+  Place the new column after the column with this id. Pass null to place it at the front.
+  """
+  afterColumnId: ID @possibleTypes(concreteTypes: ["ProjectColumn"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to move.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of MoveProjectColumn
+"""
+type MoveProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new edge of the moved column.
+  """
+  columnEdge: ProjectColumnEdge
+}
+
+"""
+Represents a 'moved_columns_in_project' event on a given issue or pull request.
+"""
+type MovedColumnsInProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the MovedColumnsInProjectEvent object
+  """
+  id: ID!
+
+  """
+  Column name the issue or pull request was moved from.
+  """
+  previousProjectColumnName: String! @preview(toggledBy: "starfox-preview")
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name the issue or pull request was moved to.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+The root query for implementing GraphQL mutations.
+"""
+type Mutation {
+  """
+  Clear all of a customer's queued migrations
+  """
+  abortQueuedMigrations(
+    """
+    Parameters for AbortQueuedMigrations
+    """
+    input: AbortQueuedMigrationsInput!
+  ): AbortQueuedMigrationsPayload
+
+  """
+  Abort a repository migration queued or in progress.
+  """
+  abortRepositoryMigration(
+    """
+    Parameters for AbortRepositoryMigration
+    """
+    input: AbortRepositoryMigrationInput!
+  ): AbortRepositoryMigrationPayload
+
+  """
+  Accepts a pending invitation for a user to become an administrator of an enterprise.
+  """
+  acceptEnterpriseAdministratorInvitation(
+    """
+    Parameters for AcceptEnterpriseAdministratorInvitation
+    """
+    input: AcceptEnterpriseAdministratorInvitationInput!
+  ): AcceptEnterpriseAdministratorInvitationPayload
+
+  """
+  Applies a suggested topic to the repository.
+  """
+  acceptTopicSuggestion(
+    """
+    Parameters for AcceptTopicSuggestion
+    """
+    input: AcceptTopicSuggestionInput!
+  ): AcceptTopicSuggestionPayload
+
+  """
+  Adds assignees to an assignable object.
+  """
+  addAssigneesToAssignable(
+    """
+    Parameters for AddAssigneesToAssignable
+    """
+    input: AddAssigneesToAssignableInput!
+  ): AddAssigneesToAssignablePayload
+
+  """
+  Adds a comment to an Issue or Pull Request.
+  """
+  addComment(
+    """
+    Parameters for AddComment
+    """
+    input: AddCommentInput!
+  ): AddCommentPayload
+
+  """
+  Adds a comment to a Discussion, possibly as a reply to another comment.
+  """
+  addDiscussionComment(
+    """
+    Parameters for AddDiscussionComment
+    """
+    input: AddDiscussionCommentInput!
+  ): AddDiscussionCommentPayload
+
+  """
+  Vote for an option in a discussion poll.
+  """
+  addDiscussionPollVote(
+    """
+    Parameters for AddDiscussionPollVote
+    """
+    input: AddDiscussionPollVoteInput!
+  ): AddDiscussionPollVotePayload
+
+  """
+  Adds enterprise members to an organization within the enterprise.
+  """
+  addEnterpriseOrganizationMember(
+    """
+    Parameters for AddEnterpriseOrganizationMember
+    """
+    input: AddEnterpriseOrganizationMemberInput!
+  ): AddEnterpriseOrganizationMemberPayload
+
+  """
+  Adds a support entitlement to an enterprise member.
+  """
+  addEnterpriseSupportEntitlement(
+    """
+    Parameters for AddEnterpriseSupportEntitlement
+    """
+    input: AddEnterpriseSupportEntitlementInput!
+  ): AddEnterpriseSupportEntitlementPayload
+
+  """
+  Adds labels to a labelable object.
+  """
+  addLabelsToLabelable(
+    """
+    Parameters for AddLabelsToLabelable
+    """
+    input: AddLabelsToLabelableInput!
+  ): AddLabelsToLabelablePayload
+
+  """
+  Adds a card to a ProjectColumn. Either `contentId` or `note` must be provided but **not** both.
+  """
+  addProjectCard(
+    """
+    Parameters for AddProjectCard
+    """
+    input: AddProjectCardInput!
+  ): AddProjectCardPayload
+
+  """
+  Adds a column to a Project.
+  """
+  addProjectColumn(
+    """
+    Parameters for AddProjectColumn
+    """
+    input: AddProjectColumnInput!
+  ): AddProjectColumnPayload
+
+  """
+  Creates a new draft issue and add it to a Project.
+  """
+  addProjectV2DraftIssue(
+    """
+    Parameters for AddProjectV2DraftIssue
+    """
+    input: AddProjectV2DraftIssueInput!
+  ): AddProjectV2DraftIssuePayload
+
+  """
+  Links an existing content instance to a Project.
+  """
+  addProjectV2ItemById(
+    """
+    Parameters for AddProjectV2ItemById
+    """
+    input: AddProjectV2ItemByIdInput!
+  ): AddProjectV2ItemByIdPayload
+
+  """
+  Adds a review to a Pull Request.
+  """
+  addPullRequestReview(
+    """
+    Parameters for AddPullRequestReview
+    """
+    input: AddPullRequestReviewInput!
+  ): AddPullRequestReviewPayload
+
+  """
+  Adds a comment to a review.
+  """
+  addPullRequestReviewComment(
+    """
+    Parameters for AddPullRequestReviewComment
+    """
+    input: AddPullRequestReviewCommentInput!
+  ): AddPullRequestReviewCommentPayload
+
+  """
+  Adds a new thread to a pending Pull Request Review.
+  """
+  addPullRequestReviewThread(
+    """
+    Parameters for AddPullRequestReviewThread
+    """
+    input: AddPullRequestReviewThreadInput!
+  ): AddPullRequestReviewThreadPayload
+
+  """
+  Adds a reply to an existing Pull Request Review Thread.
+  """
+  addPullRequestReviewThreadReply(
+    """
+    Parameters for AddPullRequestReviewThreadReply
+    """
+    input: AddPullRequestReviewThreadReplyInput!
+  ): AddPullRequestReviewThreadReplyPayload
+
+  """
+  Adds a reaction to a subject.
+  """
+  addReaction(
+    """
+    Parameters for AddReaction
+    """
+    input: AddReactionInput!
+  ): AddReactionPayload
+
+  """
+  Adds a star to a Starrable.
+  """
+  addStar(
+    """
+    Parameters for AddStar
+    """
+    input: AddStarInput!
+  ): AddStarPayload
+
+  """
+  Add an upvote to a discussion or discussion comment.
+  """
+  addUpvote(
+    """
+    Parameters for AddUpvote
+    """
+    input: AddUpvoteInput!
+  ): AddUpvotePayload
+
+  """
+  Adds a verifiable domain to an owning account.
+  """
+  addVerifiableDomain(
+    """
+    Parameters for AddVerifiableDomain
+    """
+    input: AddVerifiableDomainInput!
+  ): AddVerifiableDomainPayload
+
+  """
+  Approve all pending deployments under one or more environments
+  """
+  approveDeployments(
+    """
+    Parameters for ApproveDeployments
+    """
+    input: ApproveDeploymentsInput!
+  ): ApproveDeploymentsPayload
+
+  """
+  Approve a verifiable domain for notification delivery.
+  """
+  approveVerifiableDomain(
+    """
+    Parameters for ApproveVerifiableDomain
+    """
+    input: ApproveVerifiableDomainInput!
+  ): ApproveVerifiableDomainPayload
+
+  """
+  Archives a ProjectV2Item
+  """
+  archiveProjectV2Item(
+    """
+    Parameters for ArchiveProjectV2Item
+    """
+    input: ArchiveProjectV2ItemInput!
+  ): ArchiveProjectV2ItemPayload
+
+  """
+  Marks a repository as archived.
+  """
+  archiveRepository(
+    """
+    Parameters for ArchiveRepository
+    """
+    input: ArchiveRepositoryInput!
+  ): ArchiveRepositoryPayload
+
+  """
+  Cancels a pending invitation for an administrator to join an enterprise.
+  """
+  cancelEnterpriseAdminInvitation(
+    """
+    Parameters for CancelEnterpriseAdminInvitation
+    """
+    input: CancelEnterpriseAdminInvitationInput!
+  ): CancelEnterpriseAdminInvitationPayload
+
+  """
+  Cancel an active sponsorship.
+  """
+  cancelSponsorship(
+    """
+    Parameters for CancelSponsorship
+    """
+    input: CancelSponsorshipInput!
+  ): CancelSponsorshipPayload
+
+  """
+  Update your status on GitHub.
+  """
+  changeUserStatus(
+    """
+    Parameters for ChangeUserStatus
+    """
+    input: ChangeUserStatusInput!
+  ): ChangeUserStatusPayload
+
+  """
+  Clears all labels from a labelable object.
+  """
+  clearLabelsFromLabelable(
+    """
+    Parameters for ClearLabelsFromLabelable
+    """
+    input: ClearLabelsFromLabelableInput!
+  ): ClearLabelsFromLabelablePayload
+
+  """
+  This mutation clears the value of a field for an item in a Project. Currently
+  only text, number, date, assignees, labels, single-select, iteration and
+  milestone fields are supported.
+  """
+  clearProjectV2ItemFieldValue(
+    """
+    Parameters for ClearProjectV2ItemFieldValue
+    """
+    input: ClearProjectV2ItemFieldValueInput!
+  ): ClearProjectV2ItemFieldValuePayload
+
+  """
+  Creates a new project by cloning configuration from an existing project.
+  """
+  cloneProject(
+    """
+    Parameters for CloneProject
+    """
+    input: CloneProjectInput!
+  ): CloneProjectPayload
+
+  """
+  Create a new repository with the same files and directory structure as a template repository.
+  """
+  cloneTemplateRepository(
+    """
+    Parameters for CloneTemplateRepository
+    """
+    input: CloneTemplateRepositoryInput!
+  ): CloneTemplateRepositoryPayload
+
+  """
+  Close a discussion.
+  """
+  closeDiscussion(
+    """
+    Parameters for CloseDiscussion
+    """
+    input: CloseDiscussionInput!
+  ): CloseDiscussionPayload
+
+  """
+  Close an issue.
+  """
+  closeIssue(
+    """
+    Parameters for CloseIssue
+    """
+    input: CloseIssueInput!
+  ): CloseIssuePayload
+
+  """
+  Close a pull request.
+  """
+  closePullRequest(
+    """
+    Parameters for ClosePullRequest
+    """
+    input: ClosePullRequestInput!
+  ): ClosePullRequestPayload
+
+  """
+  Convert a project note card to one associated with a newly created issue.
+  """
+  convertProjectCardNoteToIssue(
+    """
+    Parameters for ConvertProjectCardNoteToIssue
+    """
+    input: ConvertProjectCardNoteToIssueInput!
+  ): ConvertProjectCardNoteToIssuePayload
+
+  """
+  Converts a pull request to draft
+  """
+  convertPullRequestToDraft(
+    """
+    Parameters for ConvertPullRequestToDraft
+    """
+    input: ConvertPullRequestToDraftInput!
+  ): ConvertPullRequestToDraftPayload
+
+  """
+  Copy a project.
+  """
+  copyProjectV2(
+    """
+    Parameters for CopyProjectV2
+    """
+    input: CopyProjectV2Input!
+  ): CopyProjectV2Payload
+
+  """
+  Invites a user to claim reattributable data
+  """
+  createAttributionInvitation(
+    """
+    Parameters for CreateAttributionInvitation
+    """
+    input: CreateAttributionInvitationInput!
+  ): CreateAttributionInvitationPayload
+
+  """
+  Create a new branch protection rule
+  """
+  createBranchProtectionRule(
+    """
+    Parameters for CreateBranchProtectionRule
+    """
+    input: CreateBranchProtectionRuleInput!
+  ): CreateBranchProtectionRulePayload
+
+  """
+  Create a check run.
+  """
+  createCheckRun(
+    """
+    Parameters for CreateCheckRun
+    """
+    input: CreateCheckRunInput!
+  ): CreateCheckRunPayload
+
+  """
+  Create a check suite
+  """
+  createCheckSuite(
+    """
+    Parameters for CreateCheckSuite
+    """
+    input: CreateCheckSuiteInput!
+  ): CreateCheckSuitePayload
+
+  """
+  Appends a commit to the given branch as the authenticated user.
+
+  This mutation creates a commit whose parent is the HEAD of the provided
+  branch and also updates that branch to point to the new commit.
+  It can be thought of as similar to `git commit`.
+
+  ### Locating a Branch
+
+  Commits are appended to a `branch` of type `Ref`.
+  This must refer to a git branch (i.e.  the fully qualified path must
+  begin with `refs/heads/`, although including this prefix is optional.
+
+  Callers may specify the `branch` to commit to either by its global node
+  ID or by passing both of `repositoryNameWithOwner` and `refName`.  For
+  more details see the documentation for `CommittableBranch`.
+
+  ### Describing Changes
+
+  `fileChanges` are specified as a `FilesChanges` object describing
+  `FileAdditions` and `FileDeletions`.
+
+  Please see the documentation for `FileChanges` for more information on
+  how to use this argument to describe any set of file changes.
+
+  ### Authorship
+
+  Similar to the web commit interface, this mutation does not support
+  specifying the author or committer of the commit and will not add
+  support for this in the future.
+
+  A commit created by a successful execution of this mutation will be
+  authored by the owner of the credential which authenticates the API
+  request.  The committer will be identical to that of commits authored
+  using the web interface.
+
+  If you need full control over author and committer information, please
+  use the Git Database REST API instead.
+
+  ### Commit Signing
+
+  Commits made using this mutation are automatically signed by GitHub if
+  supported and will be marked as verified in the user interface.
+  """
+  createCommitOnBranch(
+    """
+    Parameters for CreateCommitOnBranch
+    """
+    input: CreateCommitOnBranchInput!
+  ): CreateCommitOnBranchPayload
+
+  """
+  Creates a new deployment event.
+  """
+  createDeployment(
+    """
+    Parameters for CreateDeployment
+    """
+    input: CreateDeploymentInput!
+  ): CreateDeploymentPayload @preview(toggledBy: "flash-preview")
+
+  """
+  Create a deployment status.
+  """
+  createDeploymentStatus(
+    """
+    Parameters for CreateDeploymentStatus
+    """
+    input: CreateDeploymentStatusInput!
+  ): CreateDeploymentStatusPayload @preview(toggledBy: "flash-preview")
+
+  """
+  Create a discussion.
+  """
+  createDiscussion(
+    """
+    Parameters for CreateDiscussion
+    """
+    input: CreateDiscussionInput!
+  ): CreateDiscussionPayload
+
+  """
+  Creates an organization as part of an enterprise account. A personal access
+  token used to create an organization is implicitly permitted to update the
+  organization it created, if the organization is part of an enterprise that has
+  SAML enabled or uses Enterprise Managed Users. If the organization is not part
+  of such an enterprise, and instead has SAML enabled for it individually, the
+  token will then require SAML authorization to continue working against that organization.
+  """
+  createEnterpriseOrganization(
+    """
+    Parameters for CreateEnterpriseOrganization
+    """
+    input: CreateEnterpriseOrganizationInput!
+  ): CreateEnterpriseOrganizationPayload
+
+  """
+  Creates an environment or simply returns it if already exists.
+  """
+  createEnvironment(
+    """
+    Parameters for CreateEnvironment
+    """
+    input: CreateEnvironmentInput!
+  ): CreateEnvironmentPayload
+
+  """
+  Creates a new IP allow list entry.
+  """
+  createIpAllowListEntry(
+    """
+    Parameters for CreateIpAllowListEntry
+    """
+    input: CreateIpAllowListEntryInput!
+  ): CreateIpAllowListEntryPayload
+
+  """
+  Creates a new issue.
+  """
+  createIssue(
+    """
+    Parameters for CreateIssue
+    """
+    input: CreateIssueInput!
+  ): CreateIssuePayload
+
+  """
+  Creates a new label.
+  """
+  createLabel(
+    """
+    Parameters for CreateLabel
+    """
+    input: CreateLabelInput!
+  ): CreateLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Create a branch linked to an issue.
+  """
+  createLinkedBranch(
+    """
+    Parameters for CreateLinkedBranch
+    """
+    input: CreateLinkedBranchInput!
+  ): CreateLinkedBranchPayload
+
+  """
+  Creates a GitHub Enterprise Importer (GEI) migration source.
+  """
+  createMigrationSource(
+    """
+    Parameters for CreateMigrationSource
+    """
+    input: CreateMigrationSourceInput!
+  ): CreateMigrationSourcePayload
+
+  """
+  Creates a new project.
+  """
+  createProject(
+    """
+    Parameters for CreateProject
+    """
+    input: CreateProjectInput!
+  ): CreateProjectPayload
+
+  """
+  Creates a new project.
+  """
+  createProjectV2(
+    """
+    Parameters for CreateProjectV2
+    """
+    input: CreateProjectV2Input!
+  ): CreateProjectV2Payload
+
+  """
+  Create a new project field.
+  """
+  createProjectV2Field(
+    """
+    Parameters for CreateProjectV2Field
+    """
+    input: CreateProjectV2FieldInput!
+  ): CreateProjectV2FieldPayload
+
+  """
+  Create a new pull request
+  """
+  createPullRequest(
+    """
+    Parameters for CreatePullRequest
+    """
+    input: CreatePullRequestInput!
+  ): CreatePullRequestPayload
+
+  """
+  Create a new Git Ref.
+  """
+  createRef(
+    """
+    Parameters for CreateRef
+    """
+    input: CreateRefInput!
+  ): CreateRefPayload
+
+  """
+  Create a new repository.
+  """
+  createRepository(
+    """
+    Parameters for CreateRepository
+    """
+    input: CreateRepositoryInput!
+  ): CreateRepositoryPayload
+
+  """
+  Create a repository ruleset
+  """
+  createRepositoryRuleset(
+    """
+    Parameters for CreateRepositoryRuleset
+    """
+    input: CreateRepositoryRulesetInput!
+  ): CreateRepositoryRulesetPayload
+
+  """
+  Create a GitHub Sponsors profile to allow others to sponsor you or your organization.
+  """
+  createSponsorsListing(
+    """
+    Parameters for CreateSponsorsListing
+    """
+    input: CreateSponsorsListingInput!
+  ): CreateSponsorsListingPayload
+
+  """
+  Create a new payment tier for your GitHub Sponsors profile.
+  """
+  createSponsorsTier(
+    """
+    Parameters for CreateSponsorsTier
+    """
+    input: CreateSponsorsTierInput!
+  ): CreateSponsorsTierPayload
+
+  """
+  Start a new sponsorship of a maintainer in GitHub Sponsors, or reactivate a past sponsorship.
+  """
+  createSponsorship(
+    """
+    Parameters for CreateSponsorship
+    """
+    input: CreateSponsorshipInput!
+  ): CreateSponsorshipPayload
+
+  """
+  Make many one-time sponsorships for different sponsorable users or
+  organizations at once. Can only sponsor those who have a public GitHub
+  Sponsors profile.
+  """
+  createSponsorships(
+    """
+    Parameters for CreateSponsorships
+    """
+    input: CreateSponsorshipsInput!
+  ): CreateSponsorshipsPayload
+
+  """
+  Creates a new team discussion.
+  """
+  createTeamDiscussion(
+    """
+    Parameters for CreateTeamDiscussion
+    """
+    input: CreateTeamDiscussionInput!
+  ): CreateTeamDiscussionPayload
+
+  """
+  Creates a new team discussion comment.
+  """
+  createTeamDiscussionComment(
+    """
+    Parameters for CreateTeamDiscussionComment
+    """
+    input: CreateTeamDiscussionCommentInput!
+  ): CreateTeamDiscussionCommentPayload
+
+  """
+  Rejects a suggested topic for the repository.
+  """
+  declineTopicSuggestion(
+    """
+    Parameters for DeclineTopicSuggestion
+    """
+    input: DeclineTopicSuggestionInput!
+  ): DeclineTopicSuggestionPayload
+
+  """
+  Delete a branch protection rule
+  """
+  deleteBranchProtectionRule(
+    """
+    Parameters for DeleteBranchProtectionRule
+    """
+    input: DeleteBranchProtectionRuleInput!
+  ): DeleteBranchProtectionRulePayload
+
+  """
+  Deletes a deployment.
+  """
+  deleteDeployment(
+    """
+    Parameters for DeleteDeployment
+    """
+    input: DeleteDeploymentInput!
+  ): DeleteDeploymentPayload
+
+  """
+  Delete a discussion and all of its replies.
+  """
+  deleteDiscussion(
+    """
+    Parameters for DeleteDiscussion
+    """
+    input: DeleteDiscussionInput!
+  ): DeleteDiscussionPayload
+
+  """
+  Delete a discussion comment. If it has replies, wipe it instead.
+  """
+  deleteDiscussionComment(
+    """
+    Parameters for DeleteDiscussionComment
+    """
+    input: DeleteDiscussionCommentInput!
+  ): DeleteDiscussionCommentPayload
+
+  """
+  Deletes an environment
+  """
+  deleteEnvironment(
+    """
+    Parameters for DeleteEnvironment
+    """
+    input: DeleteEnvironmentInput!
+  ): DeleteEnvironmentPayload
+
+  """
+  Deletes an IP allow list entry.
+  """
+  deleteIpAllowListEntry(
+    """
+    Parameters for DeleteIpAllowListEntry
+    """
+    input: DeleteIpAllowListEntryInput!
+  ): DeleteIpAllowListEntryPayload
+
+  """
+  Deletes an Issue object.
+  """
+  deleteIssue(
+    """
+    Parameters for DeleteIssue
+    """
+    input: DeleteIssueInput!
+  ): DeleteIssuePayload
+
+  """
+  Deletes an IssueComment object.
+  """
+  deleteIssueComment(
+    """
+    Parameters for DeleteIssueComment
+    """
+    input: DeleteIssueCommentInput!
+  ): DeleteIssueCommentPayload
+
+  """
+  Deletes a label.
+  """
+  deleteLabel(
+    """
+    Parameters for DeleteLabel
+    """
+    input: DeleteLabelInput!
+  ): DeleteLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Unlink a branch from an issue.
+  """
+  deleteLinkedBranch(
+    """
+    Parameters for DeleteLinkedBranch
+    """
+    input: DeleteLinkedBranchInput!
+  ): DeleteLinkedBranchPayload
+
+  """
+  Delete a package version.
+  """
+  deletePackageVersion(
+    """
+    Parameters for DeletePackageVersion
+    """
+    input: DeletePackageVersionInput!
+  ): DeletePackageVersionPayload @preview(toggledBy: "package-deletes-preview")
+
+  """
+  Deletes a project.
+  """
+  deleteProject(
+    """
+    Parameters for DeleteProject
+    """
+    input: DeleteProjectInput!
+  ): DeleteProjectPayload
+
+  """
+  Deletes a project card.
+  """
+  deleteProjectCard(
+    """
+    Parameters for DeleteProjectCard
+    """
+    input: DeleteProjectCardInput!
+  ): DeleteProjectCardPayload
+
+  """
+  Deletes a project column.
+  """
+  deleteProjectColumn(
+    """
+    Parameters for DeleteProjectColumn
+    """
+    input: DeleteProjectColumnInput!
+  ): DeleteProjectColumnPayload
+
+  """
+  Delete a project.
+  """
+  deleteProjectV2(
+    """
+    Parameters for DeleteProjectV2
+    """
+    input: DeleteProjectV2Input!
+  ): DeleteProjectV2Payload
+
+  """
+  Delete a project field.
+  """
+  deleteProjectV2Field(
+    """
+    Parameters for DeleteProjectV2Field
+    """
+    input: DeleteProjectV2FieldInput!
+  ): DeleteProjectV2FieldPayload
+
+  """
+  Deletes an item from a Project.
+  """
+  deleteProjectV2Item(
+    """
+    Parameters for DeleteProjectV2Item
+    """
+    input: DeleteProjectV2ItemInput!
+  ): DeleteProjectV2ItemPayload
+
+  """
+  Deletes a project workflow.
+  """
+  deleteProjectV2Workflow(
+    """
+    Parameters for DeleteProjectV2Workflow
+    """
+    input: DeleteProjectV2WorkflowInput!
+  ): DeleteProjectV2WorkflowPayload
+
+  """
+  Deletes a pull request review.
+  """
+  deletePullRequestReview(
+    """
+    Parameters for DeletePullRequestReview
+    """
+    input: DeletePullRequestReviewInput!
+  ): DeletePullRequestReviewPayload
+
+  """
+  Deletes a pull request review comment.
+  """
+  deletePullRequestReviewComment(
+    """
+    Parameters for DeletePullRequestReviewComment
+    """
+    input: DeletePullRequestReviewCommentInput!
+  ): DeletePullRequestReviewCommentPayload
+
+  """
+  Delete a Git Ref.
+  """
+  deleteRef(
+    """
+    Parameters for DeleteRef
+    """
+    input: DeleteRefInput!
+  ): DeleteRefPayload
+
+  """
+  Delete a repository ruleset
+  """
+  deleteRepositoryRuleset(
+    """
+    Parameters for DeleteRepositoryRuleset
+    """
+    input: DeleteRepositoryRulesetInput!
+  ): DeleteRepositoryRulesetPayload
+
+  """
+  Deletes a team discussion.
+  """
+  deleteTeamDiscussion(
+    """
+    Parameters for DeleteTeamDiscussion
+    """
+    input: DeleteTeamDiscussionInput!
+  ): DeleteTeamDiscussionPayload
+
+  """
+  Deletes a team discussion comment.
+  """
+  deleteTeamDiscussionComment(
+    """
+    Parameters for DeleteTeamDiscussionComment
+    """
+    input: DeleteTeamDiscussionCommentInput!
+  ): DeleteTeamDiscussionCommentPayload
+
+  """
+  Deletes a verifiable domain.
+  """
+  deleteVerifiableDomain(
+    """
+    Parameters for DeleteVerifiableDomain
+    """
+    input: DeleteVerifiableDomainInput!
+  ): DeleteVerifiableDomainPayload
+
+  """
+  Remove a pull request from the merge queue.
+  """
+  dequeuePullRequest(
+    """
+    Parameters for DequeuePullRequest
+    """
+    input: DequeuePullRequestInput!
+  ): DequeuePullRequestPayload
+
+  """
+  Disable auto merge on the given pull request
+  """
+  disablePullRequestAutoMerge(
+    """
+    Parameters for DisablePullRequestAutoMerge
+    """
+    input: DisablePullRequestAutoMergeInput!
+  ): DisablePullRequestAutoMergePayload
+
+  """
+  Dismisses an approved or rejected pull request review.
+  """
+  dismissPullRequestReview(
+    """
+    Parameters for DismissPullRequestReview
+    """
+    input: DismissPullRequestReviewInput!
+  ): DismissPullRequestReviewPayload
+
+  """
+  Dismisses the Dependabot alert.
+  """
+  dismissRepositoryVulnerabilityAlert(
+    """
+    Parameters for DismissRepositoryVulnerabilityAlert
+    """
+    input: DismissRepositoryVulnerabilityAlertInput!
+  ): DismissRepositoryVulnerabilityAlertPayload
+
+  """
+  Enable the default auto-merge on a pull request.
+  """
+  enablePullRequestAutoMerge(
+    """
+    Parameters for EnablePullRequestAutoMerge
+    """
+    input: EnablePullRequestAutoMergeInput!
+  ): EnablePullRequestAutoMergePayload
+
+  """
+  Add a pull request to the merge queue.
+  """
+  enqueuePullRequest(
+    """
+    Parameters for EnqueuePullRequest
+    """
+    input: EnqueuePullRequestInput!
+  ): EnqueuePullRequestPayload
+
+  """
+  Follow an organization.
+  """
+  followOrganization(
+    """
+    Parameters for FollowOrganization
+    """
+    input: FollowOrganizationInput!
+  ): FollowOrganizationPayload
+
+  """
+  Follow a user.
+  """
+  followUser(
+    """
+    Parameters for FollowUser
+    """
+    input: FollowUserInput!
+  ): FollowUserPayload
+
+  """
+  Grant the migrator role to a user for all organizations under an enterprise account.
+  """
+  grantEnterpriseOrganizationsMigratorRole(
+    """
+    Parameters for GrantEnterpriseOrganizationsMigratorRole
+    """
+    input: GrantEnterpriseOrganizationsMigratorRoleInput!
+  ): GrantEnterpriseOrganizationsMigratorRolePayload
+
+  """
+  Grant the migrator role to a user or a team.
+  """
+  grantMigratorRole(
+    """
+    Parameters for GrantMigratorRole
+    """
+    input: GrantMigratorRoleInput!
+  ): GrantMigratorRolePayload
+
+  """
+  Creates a new project by importing columns and a list of issues/PRs.
+  """
+  importProject(
+    """
+    Parameters for ImportProject
+    """
+    input: ImportProjectInput!
+  ): ImportProjectPayload @preview(toggledBy: "slothette-preview")
+
+  """
+  Invite someone to become an administrator of the enterprise.
+  """
+  inviteEnterpriseAdmin(
+    """
+    Parameters for InviteEnterpriseAdmin
+    """
+    input: InviteEnterpriseAdminInput!
+  ): InviteEnterpriseAdminPayload
+
+  """
+  Links a project to a repository.
+  """
+  linkProjectV2ToRepository(
+    """
+    Parameters for LinkProjectV2ToRepository
+    """
+    input: LinkProjectV2ToRepositoryInput!
+  ): LinkProjectV2ToRepositoryPayload
+
+  """
+  Links a project to a team.
+  """
+  linkProjectV2ToTeam(
+    """
+    Parameters for LinkProjectV2ToTeam
+    """
+    input: LinkProjectV2ToTeamInput!
+  ): LinkProjectV2ToTeamPayload
+
+  """
+  Creates a repository link for a project.
+  """
+  linkRepositoryToProject(
+    """
+    Parameters for LinkRepositoryToProject
+    """
+    input: LinkRepositoryToProjectInput!
+  ): LinkRepositoryToProjectPayload
+
+  """
+  Lock a lockable object
+  """
+  lockLockable(
+    """
+    Parameters for LockLockable
+    """
+    input: LockLockableInput!
+  ): LockLockablePayload
+
+  """
+  Mark a discussion comment as the chosen answer for discussions in an answerable category.
+  """
+  markDiscussionCommentAsAnswer(
+    """
+    Parameters for MarkDiscussionCommentAsAnswer
+    """
+    input: MarkDiscussionCommentAsAnswerInput!
+  ): MarkDiscussionCommentAsAnswerPayload
+
+  """
+  Mark a pull request file as viewed
+  """
+  markFileAsViewed(
+    """
+    Parameters for MarkFileAsViewed
+    """
+    input: MarkFileAsViewedInput!
+  ): MarkFileAsViewedPayload
+
+  """
+  Mark a project as a template. Note that only projects which are owned by an Organization can be marked as a template.
+  """
+  markProjectV2AsTemplate(
+    """
+    Parameters for MarkProjectV2AsTemplate
+    """
+    input: MarkProjectV2AsTemplateInput!
+  ): MarkProjectV2AsTemplatePayload
+
+  """
+  Marks a pull request ready for review.
+  """
+  markPullRequestReadyForReview(
+    """
+    Parameters for MarkPullRequestReadyForReview
+    """
+    input: MarkPullRequestReadyForReviewInput!
+  ): MarkPullRequestReadyForReviewPayload
+
+  """
+  Merge a head into a branch.
+  """
+  mergeBranch(
+    """
+    Parameters for MergeBranch
+    """
+    input: MergeBranchInput!
+  ): MergeBranchPayload
+
+  """
+  Merge a pull request.
+  """
+  mergePullRequest(
+    """
+    Parameters for MergePullRequest
+    """
+    input: MergePullRequestInput!
+  ): MergePullRequestPayload
+
+  """
+  Minimizes a comment on an Issue, Commit, Pull Request, or Gist
+  """
+  minimizeComment(
+    """
+    Parameters for MinimizeComment
+    """
+    input: MinimizeCommentInput!
+  ): MinimizeCommentPayload
+
+  """
+  Moves a project card to another place.
+  """
+  moveProjectCard(
+    """
+    Parameters for MoveProjectCard
+    """
+    input: MoveProjectCardInput!
+  ): MoveProjectCardPayload
+
+  """
+  Moves a project column to another place.
+  """
+  moveProjectColumn(
+    """
+    Parameters for MoveProjectColumn
+    """
+    input: MoveProjectColumnInput!
+  ): MoveProjectColumnPayload
+
+  """
+  Pin an issue to a repository
+  """
+  pinIssue(
+    """
+    Parameters for PinIssue
+    """
+    input: PinIssueInput!
+  ): PinIssuePayload
+
+  """
+  Publish an existing sponsorship tier that is currently still a draft to a GitHub Sponsors profile.
+  """
+  publishSponsorsTier(
+    """
+    Parameters for PublishSponsorsTier
+    """
+    input: PublishSponsorsTierInput!
+  ): PublishSponsorsTierPayload
+
+  """
+  Regenerates the identity provider recovery codes for an enterprise
+  """
+  regenerateEnterpriseIdentityProviderRecoveryCodes(
+    """
+    Parameters for RegenerateEnterpriseIdentityProviderRecoveryCodes
+    """
+    input: RegenerateEnterpriseIdentityProviderRecoveryCodesInput!
+  ): RegenerateEnterpriseIdentityProviderRecoveryCodesPayload
+
+  """
+  Regenerates a verifiable domain's verification token.
+  """
+  regenerateVerifiableDomainToken(
+    """
+    Parameters for RegenerateVerifiableDomainToken
+    """
+    input: RegenerateVerifiableDomainTokenInput!
+  ): RegenerateVerifiableDomainTokenPayload
+
+  """
+  Reject all pending deployments under one or more environments
+  """
+  rejectDeployments(
+    """
+    Parameters for RejectDeployments
+    """
+    input: RejectDeploymentsInput!
+  ): RejectDeploymentsPayload
+
+  """
+  Removes assignees from an assignable object.
+  """
+  removeAssigneesFromAssignable(
+    """
+    Parameters for RemoveAssigneesFromAssignable
+    """
+    input: RemoveAssigneesFromAssignableInput!
+  ): RemoveAssigneesFromAssignablePayload
+
+  """
+  Removes an administrator from the enterprise.
+  """
+  removeEnterpriseAdmin(
+    """
+    Parameters for RemoveEnterpriseAdmin
+    """
+    input: RemoveEnterpriseAdminInput!
+  ): RemoveEnterpriseAdminPayload
+
+  """
+  Removes the identity provider from an enterprise
+  """
+  removeEnterpriseIdentityProvider(
+    """
+    Parameters for RemoveEnterpriseIdentityProvider
+    """
+    input: RemoveEnterpriseIdentityProviderInput!
+  ): RemoveEnterpriseIdentityProviderPayload
+
+  """
+  Removes a user from all organizations within the enterprise
+  """
+  removeEnterpriseMember(
+    """
+    Parameters for RemoveEnterpriseMember
+    """
+    input: RemoveEnterpriseMemberInput!
+  ): RemoveEnterpriseMemberPayload
+
+  """
+  Removes an organization from the enterprise
+  """
+  removeEnterpriseOrganization(
+    """
+    Parameters for RemoveEnterpriseOrganization
+    """
+    input: RemoveEnterpriseOrganizationInput!
+  ): RemoveEnterpriseOrganizationPayload
+
+  """
+  Removes a support entitlement from an enterprise member.
+  """
+  removeEnterpriseSupportEntitlement(
+    """
+    Parameters for RemoveEnterpriseSupportEntitlement
+    """
+    input: RemoveEnterpriseSupportEntitlementInput!
+  ): RemoveEnterpriseSupportEntitlementPayload
+
+  """
+  Removes labels from a Labelable object.
+  """
+  removeLabelsFromLabelable(
+    """
+    Parameters for RemoveLabelsFromLabelable
+    """
+    input: RemoveLabelsFromLabelableInput!
+  ): RemoveLabelsFromLabelablePayload
+
+  """
+  Removes outside collaborator from all repositories in an organization.
+  """
+  removeOutsideCollaborator(
+    """
+    Parameters for RemoveOutsideCollaborator
+    """
+    input: RemoveOutsideCollaboratorInput!
+  ): RemoveOutsideCollaboratorPayload
+
+  """
+  Removes a reaction from a subject.
+  """
+  removeReaction(
+    """
+    Parameters for RemoveReaction
+    """
+    input: RemoveReactionInput!
+  ): RemoveReactionPayload
+
+  """
+  Removes a star from a Starrable.
+  """
+  removeStar(
+    """
+    Parameters for RemoveStar
+    """
+    input: RemoveStarInput!
+  ): RemoveStarPayload
+
+  """
+  Remove an upvote to a discussion or discussion comment.
+  """
+  removeUpvote(
+    """
+    Parameters for RemoveUpvote
+    """
+    input: RemoveUpvoteInput!
+  ): RemoveUpvotePayload
+
+  """
+  Reopen a discussion.
+  """
+  reopenDiscussion(
+    """
+    Parameters for ReopenDiscussion
+    """
+    input: ReopenDiscussionInput!
+  ): ReopenDiscussionPayload
+
+  """
+  Reopen a issue.
+  """
+  reopenIssue(
+    """
+    Parameters for ReopenIssue
+    """
+    input: ReopenIssueInput!
+  ): ReopenIssuePayload
+
+  """
+  Reopen a pull request.
+  """
+  reopenPullRequest(
+    """
+    Parameters for ReopenPullRequest
+    """
+    input: ReopenPullRequestInput!
+  ): ReopenPullRequestPayload
+
+  """
+  Set review requests on a pull request.
+  """
+  requestReviews(
+    """
+    Parameters for RequestReviews
+    """
+    input: RequestReviewsInput!
+  ): RequestReviewsPayload
+
+  """
+  Rerequests an existing check suite.
+  """
+  rerequestCheckSuite(
+    """
+    Parameters for RerequestCheckSuite
+    """
+    input: RerequestCheckSuiteInput!
+  ): RerequestCheckSuitePayload
+
+  """
+  Marks a review thread as resolved.
+  """
+  resolveReviewThread(
+    """
+    Parameters for ResolveReviewThread
+    """
+    input: ResolveReviewThreadInput!
+  ): ResolveReviewThreadPayload
+
+  """
+  Retire a published payment tier from your GitHub Sponsors profile so it cannot be used to start new sponsorships.
+  """
+  retireSponsorsTier(
+    """
+    Parameters for RetireSponsorsTier
+    """
+    input: RetireSponsorsTierInput!
+  ): RetireSponsorsTierPayload
+
+  """
+  Create a pull request that reverts the changes from a merged pull request.
+  """
+  revertPullRequest(
+    """
+    Parameters for RevertPullRequest
+    """
+    input: RevertPullRequestInput!
+  ): RevertPullRequestPayload
+
+  """
+  Revoke the migrator role to a user for all organizations under an enterprise account.
+  """
+  revokeEnterpriseOrganizationsMigratorRole(
+    """
+    Parameters for RevokeEnterpriseOrganizationsMigratorRole
+    """
+    input: RevokeEnterpriseOrganizationsMigratorRoleInput!
+  ): RevokeEnterpriseOrganizationsMigratorRolePayload
+
+  """
+  Revoke the migrator role from a user or a team.
+  """
+  revokeMigratorRole(
+    """
+    Parameters for RevokeMigratorRole
+    """
+    input: RevokeMigratorRoleInput!
+  ): RevokeMigratorRolePayload
+
+  """
+  Creates or updates the identity provider for an enterprise.
+  """
+  setEnterpriseIdentityProvider(
+    """
+    Parameters for SetEnterpriseIdentityProvider
+    """
+    input: SetEnterpriseIdentityProviderInput!
+  ): SetEnterpriseIdentityProviderPayload
+
+  """
+  Set an organization level interaction limit for an organization's public repositories.
+  """
+  setOrganizationInteractionLimit(
+    """
+    Parameters for SetOrganizationInteractionLimit
+    """
+    input: SetOrganizationInteractionLimitInput!
+  ): SetOrganizationInteractionLimitPayload
+
+  """
+  Sets an interaction limit setting for a repository.
+  """
+  setRepositoryInteractionLimit(
+    """
+    Parameters for SetRepositoryInteractionLimit
+    """
+    input: SetRepositoryInteractionLimitInput!
+  ): SetRepositoryInteractionLimitPayload
+
+  """
+  Set a user level interaction limit for an user's public repositories.
+  """
+  setUserInteractionLimit(
+    """
+    Parameters for SetUserInteractionLimit
+    """
+    input: SetUserInteractionLimitInput!
+  ): SetUserInteractionLimitPayload
+
+  """
+  Starts a GitHub Enterprise Importer organization migration.
+  """
+  startOrganizationMigration(
+    """
+    Parameters for StartOrganizationMigration
+    """
+    input: StartOrganizationMigrationInput!
+  ): StartOrganizationMigrationPayload
+
+  """
+  Starts a GitHub Enterprise Importer (GEI) repository migration.
+  """
+  startRepositoryMigration(
+    """
+    Parameters for StartRepositoryMigration
+    """
+    input: StartRepositoryMigrationInput!
+  ): StartRepositoryMigrationPayload
+
+  """
+  Submits a pending pull request review.
+  """
+  submitPullRequestReview(
+    """
+    Parameters for SubmitPullRequestReview
+    """
+    input: SubmitPullRequestReviewInput!
+  ): SubmitPullRequestReviewPayload
+
+  """
+  Transfer an organization from one enterprise to another enterprise.
+  """
+  transferEnterpriseOrganization(
+    """
+    Parameters for TransferEnterpriseOrganization
+    """
+    input: TransferEnterpriseOrganizationInput!
+  ): TransferEnterpriseOrganizationPayload
+
+  """
+  Transfer an issue to a different repository
+  """
+  transferIssue(
+    """
+    Parameters for TransferIssue
+    """
+    input: TransferIssueInput!
+  ): TransferIssuePayload
+
+  """
+  Unarchives a ProjectV2Item
+  """
+  unarchiveProjectV2Item(
+    """
+    Parameters for UnarchiveProjectV2Item
+    """
+    input: UnarchiveProjectV2ItemInput!
+  ): UnarchiveProjectV2ItemPayload
+
+  """
+  Unarchives a repository.
+  """
+  unarchiveRepository(
+    """
+    Parameters for UnarchiveRepository
+    """
+    input: UnarchiveRepositoryInput!
+  ): UnarchiveRepositoryPayload
+
+  """
+  Unfollow an organization.
+  """
+  unfollowOrganization(
+    """
+    Parameters for UnfollowOrganization
+    """
+    input: UnfollowOrganizationInput!
+  ): UnfollowOrganizationPayload
+
+  """
+  Unfollow a user.
+  """
+  unfollowUser(
+    """
+    Parameters for UnfollowUser
+    """
+    input: UnfollowUserInput!
+  ): UnfollowUserPayload
+
+  """
+  Unlinks a project from a repository.
+  """
+  unlinkProjectV2FromRepository(
+    """
+    Parameters for UnlinkProjectV2FromRepository
+    """
+    input: UnlinkProjectV2FromRepositoryInput!
+  ): UnlinkProjectV2FromRepositoryPayload
+
+  """
+  Unlinks a project to a team.
+  """
+  unlinkProjectV2FromTeam(
+    """
+    Parameters for UnlinkProjectV2FromTeam
+    """
+    input: UnlinkProjectV2FromTeamInput!
+  ): UnlinkProjectV2FromTeamPayload
+
+  """
+  Deletes a repository link from a project.
+  """
+  unlinkRepositoryFromProject(
+    """
+    Parameters for UnlinkRepositoryFromProject
+    """
+    input: UnlinkRepositoryFromProjectInput!
+  ): UnlinkRepositoryFromProjectPayload
+
+  """
+  Unlock a lockable object
+  """
+  unlockLockable(
+    """
+    Parameters for UnlockLockable
+    """
+    input: UnlockLockableInput!
+  ): UnlockLockablePayload
+
+  """
+  Unmark a discussion comment as the chosen answer for discussions in an answerable category.
+  """
+  unmarkDiscussionCommentAsAnswer(
+    """
+    Parameters for UnmarkDiscussionCommentAsAnswer
+    """
+    input: UnmarkDiscussionCommentAsAnswerInput!
+  ): UnmarkDiscussionCommentAsAnswerPayload
+
+  """
+  Unmark a pull request file as viewed
+  """
+  unmarkFileAsViewed(
+    """
+    Parameters for UnmarkFileAsViewed
+    """
+    input: UnmarkFileAsViewedInput!
+  ): UnmarkFileAsViewedPayload
+
+  """
+  Unmark an issue as a duplicate of another issue.
+  """
+  unmarkIssueAsDuplicate(
+    """
+    Parameters for UnmarkIssueAsDuplicate
+    """
+    input: UnmarkIssueAsDuplicateInput!
+  ): UnmarkIssueAsDuplicatePayload
+
+  """
+  Unmark a project as a template.
+  """
+  unmarkProjectV2AsTemplate(
+    """
+    Parameters for UnmarkProjectV2AsTemplate
+    """
+    input: UnmarkProjectV2AsTemplateInput!
+  ): UnmarkProjectV2AsTemplatePayload
+
+  """
+  Unminimizes a comment on an Issue, Commit, Pull Request, or Gist
+  """
+  unminimizeComment(
+    """
+    Parameters for UnminimizeComment
+    """
+    input: UnminimizeCommentInput!
+  ): UnminimizeCommentPayload
+
+  """
+  Unpin a pinned issue from a repository
+  """
+  unpinIssue(
+    """
+    Parameters for UnpinIssue
+    """
+    input: UnpinIssueInput!
+  ): UnpinIssuePayload
+
+  """
+  Marks a review thread as unresolved.
+  """
+  unresolveReviewThread(
+    """
+    Parameters for UnresolveReviewThread
+    """
+    input: UnresolveReviewThreadInput!
+  ): UnresolveReviewThreadPayload
+
+  """
+  Update a branch protection rule
+  """
+  updateBranchProtectionRule(
+    """
+    Parameters for UpdateBranchProtectionRule
+    """
+    input: UpdateBranchProtectionRuleInput!
+  ): UpdateBranchProtectionRulePayload
+
+  """
+  Update a check run
+  """
+  updateCheckRun(
+    """
+    Parameters for UpdateCheckRun
+    """
+    input: UpdateCheckRunInput!
+  ): UpdateCheckRunPayload
+
+  """
+  Modifies the settings of an existing check suite
+  """
+  updateCheckSuitePreferences(
+    """
+    Parameters for UpdateCheckSuitePreferences
+    """
+    input: UpdateCheckSuitePreferencesInput!
+  ): UpdateCheckSuitePreferencesPayload
+
+  """
+  Update a discussion
+  """
+  updateDiscussion(
+    """
+    Parameters for UpdateDiscussion
+    """
+    input: UpdateDiscussionInput!
+  ): UpdateDiscussionPayload
+
+  """
+  Update the contents of a comment on a Discussion
+  """
+  updateDiscussionComment(
+    """
+    Parameters for UpdateDiscussionComment
+    """
+    input: UpdateDiscussionCommentInput!
+  ): UpdateDiscussionCommentPayload
+
+  """
+  Updates the role of an enterprise administrator.
+  """
+  updateEnterpriseAdministratorRole(
+    """
+    Parameters for UpdateEnterpriseAdministratorRole
+    """
+    input: UpdateEnterpriseAdministratorRoleInput!
+  ): UpdateEnterpriseAdministratorRolePayload
+
+  """
+  Sets whether private repository forks are enabled for an enterprise.
+  """
+  updateEnterpriseAllowPrivateRepositoryForkingSetting(
+    """
+    Parameters for UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+    """
+    input: UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput!
+  ): UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload
+
+  """
+  Sets the base repository permission for organizations in an enterprise.
+  """
+  updateEnterpriseDefaultRepositoryPermissionSetting(
+    """
+    Parameters for UpdateEnterpriseDefaultRepositoryPermissionSetting
+    """
+    input: UpdateEnterpriseDefaultRepositoryPermissionSettingInput!
+  ): UpdateEnterpriseDefaultRepositoryPermissionSettingPayload
+
+  """
+  Sets whether organization members with admin permissions on a repository can change repository visibility.
+  """
+  updateEnterpriseMembersCanChangeRepositoryVisibilitySetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+    """
+    input: UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput!
+  ): UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload
+
+  """
+  Sets the members can create repositories setting for an enterprise.
+  """
+  updateEnterpriseMembersCanCreateRepositoriesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanCreateRepositoriesSetting
+    """
+    input: UpdateEnterpriseMembersCanCreateRepositoriesSettingInput!
+  ): UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload
+
+  """
+  Sets the members can delete issues setting for an enterprise.
+  """
+  updateEnterpriseMembersCanDeleteIssuesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanDeleteIssuesSetting
+    """
+    input: UpdateEnterpriseMembersCanDeleteIssuesSettingInput!
+  ): UpdateEnterpriseMembersCanDeleteIssuesSettingPayload
+
+  """
+  Sets the members can delete repositories setting for an enterprise.
+  """
+  updateEnterpriseMembersCanDeleteRepositoriesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+    """
+    input: UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput!
+  ): UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload
+
+  """
+  Sets whether members can invite collaborators are enabled for an enterprise.
+  """
+  updateEnterpriseMembersCanInviteCollaboratorsSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+    """
+    input: UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput!
+  ): UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload
+
+  """
+  Sets whether or not an organization owner can make purchases.
+  """
+  updateEnterpriseMembersCanMakePurchasesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanMakePurchasesSetting
+    """
+    input: UpdateEnterpriseMembersCanMakePurchasesSettingInput!
+  ): UpdateEnterpriseMembersCanMakePurchasesSettingPayload
+
+  """
+  Sets the members can update protected branches setting for an enterprise.
+  """
+  updateEnterpriseMembersCanUpdateProtectedBranchesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+    """
+    input: UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput!
+  ): UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload
+
+  """
+  Sets the members can view dependency insights for an enterprise.
+  """
+  updateEnterpriseMembersCanViewDependencyInsightsSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+    """
+    input: UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput!
+  ): UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload
+
+  """
+  Sets whether organization projects are enabled for an enterprise.
+  """
+  updateEnterpriseOrganizationProjectsSetting(
+    """
+    Parameters for UpdateEnterpriseOrganizationProjectsSetting
+    """
+    input: UpdateEnterpriseOrganizationProjectsSettingInput!
+  ): UpdateEnterpriseOrganizationProjectsSettingPayload
+
+  """
+  Updates the role of an enterprise owner with an organization.
+  """
+  updateEnterpriseOwnerOrganizationRole(
+    """
+    Parameters for UpdateEnterpriseOwnerOrganizationRole
+    """
+    input: UpdateEnterpriseOwnerOrganizationRoleInput!
+  ): UpdateEnterpriseOwnerOrganizationRolePayload
+
+  """
+  Updates an enterprise's profile.
+  """
+  updateEnterpriseProfile(
+    """
+    Parameters for UpdateEnterpriseProfile
+    """
+    input: UpdateEnterpriseProfileInput!
+  ): UpdateEnterpriseProfilePayload
+
+  """
+  Sets whether repository projects are enabled for a enterprise.
+  """
+  updateEnterpriseRepositoryProjectsSetting(
+    """
+    Parameters for UpdateEnterpriseRepositoryProjectsSetting
+    """
+    input: UpdateEnterpriseRepositoryProjectsSettingInput!
+  ): UpdateEnterpriseRepositoryProjectsSettingPayload
+
+  """
+  Sets whether team discussions are enabled for an enterprise.
+  """
+  updateEnterpriseTeamDiscussionsSetting(
+    """
+    Parameters for UpdateEnterpriseTeamDiscussionsSetting
+    """
+    input: UpdateEnterpriseTeamDiscussionsSettingInput!
+  ): UpdateEnterpriseTeamDiscussionsSettingPayload
+
+  """
+  Sets whether two factor authentication is required for all users in an enterprise.
+  """
+  updateEnterpriseTwoFactorAuthenticationRequiredSetting(
+    """
+    Parameters for UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+    """
+    input: UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput!
+  ): UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload
+
+  """
+  Updates an environment.
+  """
+  updateEnvironment(
+    """
+    Parameters for UpdateEnvironment
+    """
+    input: UpdateEnvironmentInput!
+  ): UpdateEnvironmentPayload
+
+  """
+  Sets whether an IP allow list is enabled on an owner.
+  """
+  updateIpAllowListEnabledSetting(
+    """
+    Parameters for UpdateIpAllowListEnabledSetting
+    """
+    input: UpdateIpAllowListEnabledSettingInput!
+  ): UpdateIpAllowListEnabledSettingPayload
+
+  """
+  Updates an IP allow list entry.
+  """
+  updateIpAllowListEntry(
+    """
+    Parameters for UpdateIpAllowListEntry
+    """
+    input: UpdateIpAllowListEntryInput!
+  ): UpdateIpAllowListEntryPayload
+
+  """
+  Sets whether IP allow list configuration for installed GitHub Apps is enabled on an owner.
+  """
+  updateIpAllowListForInstalledAppsEnabledSetting(
+    """
+    Parameters for UpdateIpAllowListForInstalledAppsEnabledSetting
+    """
+    input: UpdateIpAllowListForInstalledAppsEnabledSettingInput!
+  ): UpdateIpAllowListForInstalledAppsEnabledSettingPayload
+
+  """
+  Updates an Issue.
+  """
+  updateIssue(
+    """
+    Parameters for UpdateIssue
+    """
+    input: UpdateIssueInput!
+  ): UpdateIssuePayload
+
+  """
+  Updates an IssueComment object.
+  """
+  updateIssueComment(
+    """
+    Parameters for UpdateIssueComment
+    """
+    input: UpdateIssueCommentInput!
+  ): UpdateIssueCommentPayload
+
+  """
+  Updates an existing label.
+  """
+  updateLabel(
+    """
+    Parameters for UpdateLabel
+    """
+    input: UpdateLabelInput!
+  ): UpdateLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Update the setting to restrict notifications to only verified or approved domains available to an owner.
+  """
+  updateNotificationRestrictionSetting(
+    """
+    Parameters for UpdateNotificationRestrictionSetting
+    """
+    input: UpdateNotificationRestrictionSettingInput!
+  ): UpdateNotificationRestrictionSettingPayload
+
+  """
+  Sets whether private repository forks are enabled for an organization.
+  """
+  updateOrganizationAllowPrivateRepositoryForkingSetting(
+    """
+    Parameters for UpdateOrganizationAllowPrivateRepositoryForkingSetting
+    """
+    input: UpdateOrganizationAllowPrivateRepositoryForkingSettingInput!
+  ): UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload
+
+  """
+  Sets whether contributors are required to sign off on web-based commits for repositories in an organization.
+  """
+  updateOrganizationWebCommitSignoffSetting(
+    """
+    Parameters for UpdateOrganizationWebCommitSignoffSetting
+    """
+    input: UpdateOrganizationWebCommitSignoffSettingInput!
+  ): UpdateOrganizationWebCommitSignoffSettingPayload
+
+  """
+  Toggle the setting for your GitHub Sponsors profile that allows other GitHub
+  accounts to sponsor you on GitHub while paying for the sponsorship on Patreon.
+  Only applicable when you have a GitHub Sponsors profile and have connected
+  your GitHub account with Patreon.
+  """
+  updatePatreonSponsorability(
+    """
+    Parameters for UpdatePatreonSponsorability
+    """
+    input: UpdatePatreonSponsorabilityInput!
+  ): UpdatePatreonSponsorabilityPayload
+
+  """
+  Updates an existing project.
+  """
+  updateProject(
+    """
+    Parameters for UpdateProject
+    """
+    input: UpdateProjectInput!
+  ): UpdateProjectPayload
+
+  """
+  Updates an existing project card.
+  """
+  updateProjectCard(
+    """
+    Parameters for UpdateProjectCard
+    """
+    input: UpdateProjectCardInput!
+  ): UpdateProjectCardPayload
+
+  """
+  Updates an existing project column.
+  """
+  updateProjectColumn(
+    """
+    Parameters for UpdateProjectColumn
+    """
+    input: UpdateProjectColumnInput!
+  ): UpdateProjectColumnPayload
+
+  """
+  Updates an existing project (beta).
+  """
+  updateProjectV2(
+    """
+    Parameters for UpdateProjectV2
+    """
+    input: UpdateProjectV2Input!
+  ): UpdateProjectV2Payload
+
+  """
+  Update the collaborators on a team or a project
+  """
+  updateProjectV2Collaborators(
+    """
+    Parameters for UpdateProjectV2Collaborators
+    """
+    input: UpdateProjectV2CollaboratorsInput!
+  ): UpdateProjectV2CollaboratorsPayload
+
+  """
+  Updates a draft issue within a Project.
+  """
+  updateProjectV2DraftIssue(
+    """
+    Parameters for UpdateProjectV2DraftIssue
+    """
+    input: UpdateProjectV2DraftIssueInput!
+  ): UpdateProjectV2DraftIssuePayload
+
+  """
+  This mutation updates the value of a field for an item in a Project. Currently
+  only single-select, text, number, date, and iteration fields are supported.
+  """
+  updateProjectV2ItemFieldValue(
+    """
+    Parameters for UpdateProjectV2ItemFieldValue
+    """
+    input: UpdateProjectV2ItemFieldValueInput!
+  ): UpdateProjectV2ItemFieldValuePayload
+
+  """
+  This mutation updates the position of the item in the project, where the position represents the priority of an item.
+  """
+  updateProjectV2ItemPosition(
+    """
+    Parameters for UpdateProjectV2ItemPosition
+    """
+    input: UpdateProjectV2ItemPositionInput!
+  ): UpdateProjectV2ItemPositionPayload
+
+  """
+  Update a pull request
+  """
+  updatePullRequest(
+    """
+    Parameters for UpdatePullRequest
+    """
+    input: UpdatePullRequestInput!
+  ): UpdatePullRequestPayload
+
+  """
+  Merge or Rebase HEAD from upstream branch into pull request branch
+  """
+  updatePullRequestBranch(
+    """
+    Parameters for UpdatePullRequestBranch
+    """
+    input: UpdatePullRequestBranchInput!
+  ): UpdatePullRequestBranchPayload
+
+  """
+  Updates the body of a pull request review.
+  """
+  updatePullRequestReview(
+    """
+    Parameters for UpdatePullRequestReview
+    """
+    input: UpdatePullRequestReviewInput!
+  ): UpdatePullRequestReviewPayload
+
+  """
+  Updates a pull request review comment.
+  """
+  updatePullRequestReviewComment(
+    """
+    Parameters for UpdatePullRequestReviewComment
+    """
+    input: UpdatePullRequestReviewCommentInput!
+  ): UpdatePullRequestReviewCommentPayload
+
+  """
+  Update a Git Ref.
+  """
+  updateRef(
+    """
+    Parameters for UpdateRef
+    """
+    input: UpdateRefInput!
+  ): UpdateRefPayload
+
+  """
+  Creates, updates and/or deletes multiple refs in a repository.
+
+  This mutation takes a list of `RefUpdate`s and performs these updates
+  on the repository. All updates are performed atomically, meaning that
+  if one of them is rejected, no other ref will be modified.
+
+  `RefUpdate.beforeOid` specifies that the given reference needs to point
+  to the given value before performing any updates. A value of
+  `0000000000000000000000000000000000000000` can be used to verify that
+  the references should not exist.
+
+  `RefUpdate.afterOid` specifies the value that the given reference
+  will point to after performing all updates. A value of
+  `0000000000000000000000000000000000000000` can be used to delete a
+  reference.
+
+  If `RefUpdate.force` is set to `true`, a non-fast-forward updates
+  for the given reference will be allowed.
+  """
+  updateRefs(
+    """
+    Parameters for UpdateRefs
+    """
+    input: UpdateRefsInput!
+  ): UpdateRefsPayload @preview(toggledBy: "update-refs-preview")
+
+  """
+  Update information about a repository.
+  """
+  updateRepository(
+    """
+    Parameters for UpdateRepository
+    """
+    input: UpdateRepositoryInput!
+  ): UpdateRepositoryPayload
+
+  """
+  Update a repository ruleset
+  """
+  updateRepositoryRuleset(
+    """
+    Parameters for UpdateRepositoryRuleset
+    """
+    input: UpdateRepositoryRulesetInput!
+  ): UpdateRepositoryRulesetPayload
+
+  """
+  Sets whether contributors are required to sign off on web-based commits for a repository.
+  """
+  updateRepositoryWebCommitSignoffSetting(
+    """
+    Parameters for UpdateRepositoryWebCommitSignoffSetting
+    """
+    input: UpdateRepositoryWebCommitSignoffSettingInput!
+  ): UpdateRepositoryWebCommitSignoffSettingPayload
+
+  """
+  Change visibility of your sponsorship and opt in or out of email updates from the maintainer.
+  """
+  updateSponsorshipPreferences(
+    """
+    Parameters for UpdateSponsorshipPreferences
+    """
+    input: UpdateSponsorshipPreferencesInput!
+  ): UpdateSponsorshipPreferencesPayload
+
+  """
+  Updates the state for subscribable subjects.
+  """
+  updateSubscription(
+    """
+    Parameters for UpdateSubscription
+    """
+    input: UpdateSubscriptionInput!
+  ): UpdateSubscriptionPayload
+
+  """
+  Updates a team discussion.
+  """
+  updateTeamDiscussion(
+    """
+    Parameters for UpdateTeamDiscussion
+    """
+    input: UpdateTeamDiscussionInput!
+  ): UpdateTeamDiscussionPayload
+
+  """
+  Updates a discussion comment.
+  """
+  updateTeamDiscussionComment(
+    """
+    Parameters for UpdateTeamDiscussionComment
+    """
+    input: UpdateTeamDiscussionCommentInput!
+  ): UpdateTeamDiscussionCommentPayload
+
+  """
+  Updates team review assignment.
+  """
+  updateTeamReviewAssignment(
+    """
+    Parameters for UpdateTeamReviewAssignment
+    """
+    input: UpdateTeamReviewAssignmentInput!
+  ): UpdateTeamReviewAssignmentPayload @preview(toggledBy: "stone-crop-preview")
+
+  """
+  Update team repository.
+  """
+  updateTeamsRepository(
+    """
+    Parameters for UpdateTeamsRepository
+    """
+    input: UpdateTeamsRepositoryInput!
+  ): UpdateTeamsRepositoryPayload
+
+  """
+  Replaces the repository's topics with the given topics.
+  """
+  updateTopics(
+    """
+    Parameters for UpdateTopics
+    """
+    input: UpdateTopicsInput!
+  ): UpdateTopicsPayload
+
+  """
+  Verify that a verifiable domain has the expected DNS record.
+  """
+  verifyVerifiableDomain(
+    """
+    Parameters for VerifyVerifiableDomain
+    """
+    input: VerifyVerifiableDomainInput!
+  ): VerifyVerifiableDomainPayload
+}
+
+"""
+An object with an ID.
+"""
+interface Node {
+  """
+  ID of the object.
+  """
+  id: ID!
+}
+
+"""
+The possible values for the notification restriction setting.
+"""
+enum NotificationRestrictionSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+An OIDC identity provider configured to provision identities for an enterprise.
+Visible to enterprise owners or enterprise owners' personal access tokens
+(classic) with read:enterprise or admin:enterprise scope.
+"""
+type OIDCProvider implements Node {
+  """
+  The enterprise this identity provider belongs to.
+  """
+  enterprise: Enterprise
+
+  """
+  ExternalIdentities provisioned by this identity provider.
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the users login
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the users userName/NameID attribute
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the OIDCProvider object
+  """
+  id: ID!
+
+  """
+  The OIDC identity provider type
+  """
+  providerType: OIDCProviderType!
+
+  """
+  The id of the tenant this provider is attached to
+  """
+  tenantId: String!
+}
+
+"""
+The OIDC identity provider type
+"""
+enum OIDCProviderType {
+  """
+  Azure Active Directory
+  """
+  AAD
+}
+
+"""
+Metadata for an audit entry with action oauth_application.*
+"""
+interface OauthApplicationAuditEntryData {
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+}
+
+"""
+Audit log entry for a oauth_application.create event.
+"""
+type OauthApplicationCreateAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The application URL of the OAuth application.
+  """
+  applicationUrl: URI
+
+  """
+  The callback URL of the OAuth application.
+  """
+  callbackUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OauthApplicationCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The rate limit of the OAuth application.
+  """
+  rateLimit: Int
+
+  """
+  The state of the OAuth application.
+  """
+  state: OauthApplicationCreateAuditEntryState
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The state of an OAuth application when it was created.
+"""
+enum OauthApplicationCreateAuditEntryState {
+  """
+  The OAuth application was active and allowed to have OAuth Accesses.
+  """
+  ACTIVE
+
+  """
+  The OAuth application was in the process of being deleted.
+  """
+  PENDING_DELETION
+
+  """
+  The OAuth application was suspended from generating OAuth Accesses due to abuse or security concerns.
+  """
+  SUSPENDED
+}
+
+"""
+The corresponding operation type for the action
+"""
+enum OperationType {
+  """
+  An existing resource was accessed
+  """
+  ACCESS
+
+  """
+  A resource performed an authentication event
+  """
+  AUTHENTICATION
+
+  """
+  A new resource was created
+  """
+  CREATE
+
+  """
+  An existing resource was modified
+  """
+  MODIFY
+
+  """
+  An existing resource was removed
+  """
+  REMOVE
+
+  """
+  An existing resource was restored
+  """
+  RESTORE
+
+  """
+  An existing resource was transferred between multiple resources
+  """
+  TRANSFER
+}
+
+"""
+Possible directions in which to order a list of items when provided an `orderBy` argument.
+"""
+enum OrderDirection {
+  """
+  Specifies an ascending order for a given `orderBy` argument.
+  """
+  ASC
+
+  """
+  Specifies a descending order for a given `orderBy` argument.
+  """
+  DESC
+}
+
+"""
+Audit log entry for a org.add_billing_manager
+"""
+type OrgAddBillingManagerAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgAddBillingManagerAuditEntry object
+  """
+  id: ID!
+
+  """
+  The email address used to invite a billing manager for the organization.
+  """
+  invitationEmail: String
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.add_member
+"""
+type OrgAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The permission level of the member added to the organization.
+  """
+  permission: OrgAddMemberAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The permissions available to members on an Organization.
+"""
+enum OrgAddMemberAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+}
+
+"""
+Audit log entry for a org.block_user
+"""
+type OrgBlockUserAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The blocked user.
+  """
+  blockedUser: User
+
+  """
+  The username of the blocked user.
+  """
+  blockedUserName: String
+
+  """
+  The HTTP path for the blocked user.
+  """
+  blockedUserResourcePath: URI
+
+  """
+  The HTTP URL for the blocked user.
+  """
+  blockedUserUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgBlockUserAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.config.disable_collaborators_only event.
+"""
+type OrgConfigDisableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgConfigDisableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.config.enable_collaborators_only event.
+"""
+type OrgConfigEnableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgConfigEnableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.create event.
+"""
+type OrgCreateAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The billing plan for the Organization.
+  """
+  billingPlan: OrgCreateAuditEntryBillingPlan
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The billing plans available for organizations.
+"""
+enum OrgCreateAuditEntryBillingPlan {
+  """
+  Team Plan
+  """
+  BUSINESS
+
+  """
+  Enterprise Cloud Plan
+  """
+  BUSINESS_PLUS
+
+  """
+  Free Plan
+  """
+  FREE
+
+  """
+  Tiered Per Seat Plan
+  """
+  TIERED_PER_SEAT
+
+  """
+  Legacy Unlimited Plan
+  """
+  UNLIMITED
+}
+
+"""
+Audit log entry for a org.disable_oauth_app_restrictions event.
+"""
+type OrgDisableOauthAppRestrictionsAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgDisableOauthAppRestrictionsAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.disable_saml event.
+"""
+type OrgDisableSamlAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The SAML provider's digest algorithm URL.
+  """
+  digestMethodUrl: URI
+
+  """
+  The Node ID of the OrgDisableSamlAuditEntry object
+  """
+  id: ID!
+
+  """
+  The SAML provider's issuer URL.
+  """
+  issuerUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The SAML provider's signature algorithm URL.
+  """
+  signatureMethodUrl: URI
+
+  """
+  The SAML provider's single sign-on URL.
+  """
+  singleSignOnUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.disable_two_factor_requirement event.
+"""
+type OrgDisableTwoFactorRequirementAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgDisableTwoFactorRequirementAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.enable_oauth_app_restrictions event.
+"""
+type OrgEnableOauthAppRestrictionsAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgEnableOauthAppRestrictionsAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.enable_saml event.
+"""
+type OrgEnableSamlAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The SAML provider's digest algorithm URL.
+  """
+  digestMethodUrl: URI
+
+  """
+  The Node ID of the OrgEnableSamlAuditEntry object
+  """
+  id: ID!
+
+  """
+  The SAML provider's issuer URL.
+  """
+  issuerUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The SAML provider's signature algorithm URL.
+  """
+  signatureMethodUrl: URI
+
+  """
+  The SAML provider's single sign-on URL.
+  """
+  singleSignOnUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.enable_two_factor_requirement event.
+"""
+type OrgEnableTwoFactorRequirementAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgEnableTwoFactorRequirementAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Ordering options for an organization's enterprise owner connections.
+"""
+input OrgEnterpriseOwnerOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise owners by.
+  """
+  field: OrgEnterpriseOwnerOrderField!
+}
+
+"""
+Properties by which enterprise owners can be ordered.
+"""
+enum OrgEnterpriseOwnerOrderField {
+  """
+  Order enterprise owners by login.
+  """
+  LOGIN
+}
+
+"""
+Audit log entry for a org.invite_member event.
+"""
+type OrgInviteMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The email address of the organization invitation.
+  """
+  email: String
+
+  """
+  The Node ID of the OrgInviteMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The organization invitation.
+  """
+  organizationInvitation: OrganizationInvitation
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.invite_to_business event.
+"""
+type OrgInviteToBusinessAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the OrgInviteToBusinessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.oauth_app_access_approved event.
+"""
+type OrgOauthAppAccessApprovedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessApprovedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.oauth_app_access_blocked event.
+"""
+type OrgOauthAppAccessBlockedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessBlockedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.oauth_app_access_denied event.
+"""
+type OrgOauthAppAccessDeniedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessDeniedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.oauth_app_access_requested event.
+"""
+type OrgOauthAppAccessRequestedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessRequestedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.oauth_app_access_unblocked event.
+"""
+type OrgOauthAppAccessUnblockedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessUnblockedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.remove_billing_manager event.
+"""
+type OrgRemoveBillingManagerAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveBillingManagerAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the billing manager being removed.
+  """
+  reason: OrgRemoveBillingManagerAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The reason a billing manager was removed from an Organization.
+"""
+enum OrgRemoveBillingManagerAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  SAML SSO enforcement requires an external identity
+  """
+  SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY
+
+  """
+  The organization required 2FA of its billing managers and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+}
+
+"""
+Audit log entry for a org.remove_member event.
+"""
+type OrgRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The types of membership the member has with the organization.
+  """
+  membershipTypes: [OrgRemoveMemberAuditEntryMembershipType!]
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the member being removed.
+  """
+  reason: OrgRemoveMemberAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The type of membership a user has with an Organization.
+"""
+enum OrgRemoveMemberAuditEntryMembershipType {
+  """
+  Organization owners have full access and can change several settings,
+  including the names of repositories that belong to the Organization and Owners
+  team membership. In addition, organization owners can delete the organization
+  and all of its repositories.
+  """
+  ADMIN
+
+  """
+  A billing manager is a user who manages the billing settings for the Organization, such as updating payment information.
+  """
+  BILLING_MANAGER
+
+  """
+  A direct member is a user that is a member of the Organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  An outside collaborator is a person who isn't explicitly a member of the
+  Organization, but who has Read, Write, or Admin permissions to one or more
+  repositories in the organization.
+  """
+  OUTSIDE_COLLABORATOR
+
+  """
+  A suspended member.
+  """
+  SUSPENDED
+
+  """
+  An unaffiliated collaborator is a person who is not a member of the
+  Organization and does not have access to any repositories in the Organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The reason a member was removed from an Organization.
+"""
+enum OrgRemoveMemberAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  SAML SSO enforcement requires an external identity
+  """
+  SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY
+
+  """
+  User was removed from organization during account recovery
+  """
+  TWO_FACTOR_ACCOUNT_RECOVERY
+
+  """
+  The organization required 2FA of its billing managers and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+
+  """
+  User account has been deleted
+  """
+  USER_ACCOUNT_DELETED
+}
+
+"""
+Audit log entry for a org.remove_outside_collaborator event.
+"""
+type OrgRemoveOutsideCollaboratorAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveOutsideCollaboratorAuditEntry object
+  """
+  id: ID!
+
+  """
+  The types of membership the outside collaborator has with the organization.
+  """
+  membershipTypes: [OrgRemoveOutsideCollaboratorAuditEntryMembershipType!]
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the outside collaborator being removed from the Organization.
+  """
+  reason: OrgRemoveOutsideCollaboratorAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The type of membership a user has with an Organization.
+"""
+enum OrgRemoveOutsideCollaboratorAuditEntryMembershipType {
+  """
+  A billing manager is a user who manages the billing settings for the Organization, such as updating payment information.
+  """
+  BILLING_MANAGER
+
+  """
+  An outside collaborator is a person who isn't explicitly a member of the
+  Organization, but who has Read, Write, or Admin permissions to one or more
+  repositories in the organization.
+  """
+  OUTSIDE_COLLABORATOR
+
+  """
+  An unaffiliated collaborator is a person who is not a member of the
+  Organization and does not have access to any repositories in the organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The reason an outside collaborator was removed from an Organization.
+"""
+enum OrgRemoveOutsideCollaboratorAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  The organization required 2FA of its billing managers and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+}
+
+"""
+Audit log entry for a org.restore_member event.
+"""
+type OrgRestoreMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRestoreMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The number of custom email routings for the restored member.
+  """
+  restoredCustomEmailRoutingsCount: Int
+
+  """
+  The number of issue assignments for the restored member.
+  """
+  restoredIssueAssignmentsCount: Int
+
+  """
+  Restored organization membership objects.
+  """
+  restoredMemberships: [OrgRestoreMemberAuditEntryMembership!]
+
+  """
+  The number of restored memberships.
+  """
+  restoredMembershipsCount: Int
+
+  """
+  The number of repositories of the restored member.
+  """
+  restoredRepositoriesCount: Int
+
+  """
+  The number of starred repositories for the restored member.
+  """
+  restoredRepositoryStarsCount: Int
+
+  """
+  The number of watched repositories for the restored member.
+  """
+  restoredRepositoryWatchesCount: Int
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Types of memberships that can be restored for an Organization member.
+"""
+union OrgRestoreMemberAuditEntryMembership =
+    OrgRestoreMemberMembershipOrganizationAuditEntryData
+  | OrgRestoreMemberMembershipRepositoryAuditEntryData
+  | OrgRestoreMemberMembershipTeamAuditEntryData
+
+"""
+Metadata for an organization membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipOrganizationAuditEntryData implements OrganizationAuditEntryData {
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+}
+
+"""
+Metadata for a repository membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipRepositoryAuditEntryData implements RepositoryAuditEntryData {
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+}
+
+"""
+Metadata for a team membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipTeamAuditEntryData implements TeamAuditEntryData {
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+}
+
+"""
+Audit log entry for a org.unblock_user
+"""
+type OrgUnblockUserAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The user being unblocked by the organization.
+  """
+  blockedUser: User
+
+  """
+  The username of the blocked user.
+  """
+  blockedUserName: String
+
+  """
+  The HTTP path for the blocked user.
+  """
+  blockedUserResourcePath: URI
+
+  """
+  The HTTP URL for the blocked user.
+  """
+  blockedUserUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUnblockUserAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a org.update_default_repository_permission
+"""
+type OrgUpdateDefaultRepositoryPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateDefaultRepositoryPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new base repository permission level for the organization.
+  """
+  permission: OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+
+  """
+  The former base repository permission level for the organization.
+  """
+  permissionWas: OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The default permission a repository can have in an Organization.
+"""
+enum OrgUpdateDefaultRepositoryPermissionAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  No default permission value.
+  """
+  NONE
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+
+  """
+  Can read, clone and push to repositories.
+  """
+  WRITE
+}
+
+"""
+Audit log entry for a org.update_member event.
+"""
+type OrgUpdateMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new member permission level for the organization.
+  """
+  permission: OrgUpdateMemberAuditEntryPermission
+
+  """
+  The former member permission level for the organization.
+  """
+  permissionWas: OrgUpdateMemberAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The permissions available to members on an Organization.
+"""
+enum OrgUpdateMemberAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+}
+
+"""
+Audit log entry for a org.update_member_repository_creation_permission event.
+"""
+type OrgUpdateMemberRepositoryCreationPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  Can members create repositories in the organization.
+  """
+  canCreateRepositories: Boolean
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberRepositoryCreationPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The permission for visibility level of repositories for this organization.
+  """
+  visibility: OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility
+}
+
+"""
+The permissions available for repository creation on an Organization.
+"""
+enum OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility {
+  """
+  All organization members are restricted from creating any repositories.
+  """
+  ALL
+
+  """
+  All organization members are restricted from creating internal repositories.
+  """
+  INTERNAL
+
+  """
+  All organization members are allowed to create any repositories.
+  """
+  NONE
+
+  """
+  All organization members are restricted from creating private repositories.
+  """
+  PRIVATE
+
+  """
+  All organization members are restricted from creating private or internal repositories.
+  """
+  PRIVATE_INTERNAL
+
+  """
+  All organization members are restricted from creating public repositories.
+  """
+  PUBLIC
+
+  """
+  All organization members are restricted from creating public or internal repositories.
+  """
+  PUBLIC_INTERNAL
+
+  """
+  All organization members are restricted from creating public or private repositories.
+  """
+  PUBLIC_PRIVATE
+}
+
+"""
+Audit log entry for a org.update_member_repository_invitation_permission event.
+"""
+type OrgUpdateMemberRepositoryInvitationPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  Can outside collaborators be invited to repositories in the organization.
+  """
+  canInviteOutsideCollaboratorsToRepositories: Boolean
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberRepositoryInvitationPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+An account on GitHub, with one or more owners, that has repositories, members and teams.
+"""
+type Organization implements Actor & AnnouncementBanner & MemberStatusable & Node & PackageOwner & ProfileOwner & ProjectOwner & ProjectV2Owner & ProjectV2Recent & RepositoryDiscussionAuthor & RepositoryDiscussionCommentAuthor & RepositoryOwner & Sponsorable & UniformResourceLocatable {
+  """
+  The text of the announcement
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user
+  """
+  announcementUserDismissible: Boolean
+
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  Identifies the date and time when the organization was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  Audit log entries of the organization
+  """
+  auditLog(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned audit log entries.
+    """
+    orderBy: AuditLogOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The query string to filter audit entries
+    """
+    query: String
+  ): OrganizationAuditEntryConnection!
+
+  """
+  A URL pointing to the organization's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The organization's public profile description.
+  """
+  description: String
+
+  """
+  The organization's public profile description rendered to HTML.
+  """
+  descriptionHTML: String
+
+  """
+  A list of domains owned by the organization.
+  """
+  domains(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter by if the domain is approved.
+    """
+    isApproved: Boolean = null
+
+    """
+    Filter by if the domain is verified.
+    """
+    isVerified: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for verifiable domains returned.
+    """
+    orderBy: VerifiableDomainOrder = {field: DOMAIN, direction: ASC}
+  ): VerifiableDomainConnection
+
+  """
+  The organization's public email.
+  """
+  email: String
+
+  """
+  A list of owners of the organization's enterprise account.
+  """
+  enterpriseOwners(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for enterprise owners returned from the connection.
+    """
+    orderBy: OrgEnterpriseOwnerOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The organization role to filter by.
+    """
+    organizationRole: RoleInOrganization
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): OrganizationEnterpriseOwnerConnection!
+
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  The Node ID of the Organization object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this organization.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  The setting value for whether the organization has an IP allow list enabled.
+  """
+  ipAllowListEnabledSetting: IpAllowListEnabledSettingValue!
+
+  """
+  The IP addresses that are allowed to access resources owned by the organization.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The setting value for whether the organization has IP allow list configuration for installed GitHub Apps enabled.
+  """
+  ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  Whether the organization has verified its profile email and website.
+  """
+  isVerified: Boolean!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  either curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The organization's public profile location.
+  """
+  location: String
+
+  """
+  The organization's login name.
+  """
+  login: String!
+
+  """
+  A list of all mannequins for this organization.
+  """
+  mannequins(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter mannequins by login.
+    """
+    login: String
+
+    """
+    Ordering options for mannequins returned from the connection.
+    """
+    orderBy: MannequinOrder = {field: CREATED_AT, direction: ASC}
+  ): MannequinConnection!
+
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+
+  """
+  Members can fork private repositories in this organization
+  """
+  membersCanForkPrivateRepositories: Boolean!
+
+  """
+  A list of users who are members of this organization.
+  """
+  membersWithRole(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationMemberConnection!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  The organization's public profile name.
+  """
+  name: String
+
+  """
+  The HTTP path creating a new team
+  """
+  newTeamResourcePath: URI!
+
+  """
+  The HTTP URL creating a new team
+  """
+  newTeamUrl: URI!
+
+  """
+  Indicates if email notification delivery for this organization is restricted to verified or approved domains.
+  """
+  notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue!
+
+  """
+  The billing email for the organization.
+  """
+  organizationBillingEmail: String
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  A list of users who have been invited to join this organization.
+  """
+  pendingMembers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing organization's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing organization's projects
+  """
+  projectsUrl: URI!
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only those that have been answered or not. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  A list of all repository migrations for this organization.
+  """
+  repositoryMigrations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repository migrations returned.
+    """
+    orderBy: RepositoryMigrationOrder = {field: CREATED_AT, direction: ASC}
+
+    """
+    Filter repository migrations by repository name.
+    """
+    repositoryName: String
+
+    """
+    Filter repository migrations by state.
+    """
+    state: MigrationState
+  ): RepositoryMigrationConnection!
+
+  """
+  When true the organization requires all members, billing managers, and outside
+  collaborators to enable two-factor authentication.
+  """
+  requiresTwoFactorAuthentication: Boolean
+
+  """
+  The HTTP path for this organization.
+  """
+  resourcePath: URI!
+
+  """
+  Returns a single ruleset from the current organization by ID.
+  """
+  ruleset(
+    """
+    The ID of the ruleset to be returned.
+    """
+    databaseId: Int!
+  ): RepositoryRuleset
+
+  """
+  A list of rulesets for this organization.
+  """
+  rulesets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Return rulesets configured at higher levels that apply to this organization
+    """
+    includeParents: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetConnection
+
+  """
+  The Organization's SAML identity provider. Visible to (1) organization owners,
+  (2) organization owners' personal access tokens (classic) with read:org or
+  admin:org scope, (3) GitHub App with an installation token with read or write
+  access to members.
+  """
+  samlIdentityProvider: OrganizationIdentityProvider
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  Find an organization's team by its slug.
+  """
+  team(
+    """
+    The name or slug of the team to find.
+    """
+    slug: String!
+  ): Team
+
+  """
+  A list of teams in this organization.
+  """
+  teams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    If true, filters teams that are mapped to an LDAP Group (Enterprise only)
+    """
+    ldapMapped: Boolean
+
+    """
+    If non-null, filters teams according to notification setting
+    """
+    notificationSetting: TeamNotificationSetting
+
+    """
+    Ordering options for teams returned from the connection
+    """
+    orderBy: TeamOrder
+
+    """
+    If non-null, filters teams according to privacy
+    """
+    privacy: TeamPrivacy
+
+    """
+    If non-null, filters teams with query on team name and team slug
+    """
+    query: String
+
+    """
+    If non-null, filters teams according to whether the viewer is an admin or member on team
+    """
+    role: TeamRole
+
+    """
+    If true, restrict to only root teams
+    """
+    rootTeamsOnly: Boolean = false
+
+    """
+    User logins to filter by
+    """
+    userLogins: [String!]
+  ): TeamConnection!
+
+  """
+  The HTTP path listing organization's teams
+  """
+  teamsResourcePath: URI!
+
+  """
+  The HTTP URL listing organization's teams
+  """
+  teamsUrl: URI!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  The organization's Twitter username.
+  """
+  twitterUsername: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this organization.
+  """
+  url: URI!
+
+  """
+  Organization is adminable by the viewer.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Viewer can create repositories on this organization
+  """
+  viewerCanCreateRepositories: Boolean!
+
+  """
+  Viewer can create teams on this organization.
+  """
+  viewerCanCreateTeams: Boolean!
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  Viewer is an active member of this organization.
+  """
+  viewerIsAMember: Boolean!
+
+  """
+  Whether or not this Organization is followed by the viewer.
+  """
+  viewerIsFollowing: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+
+  """
+  Whether contributors are required to sign off on web-based commits for repositories in this organization.
+  """
+  webCommitSignoffRequired: Boolean!
+
+  """
+  The organization's public profile URL.
+  """
+  websiteUrl: URI
+}
+
+"""
+An audit entry in an organization audit log.
+"""
+union OrganizationAuditEntry =
+    MembersCanDeleteReposClearAuditEntry
+  | MembersCanDeleteReposDisableAuditEntry
+  | MembersCanDeleteReposEnableAuditEntry
+  | OauthApplicationCreateAuditEntry
+  | OrgAddBillingManagerAuditEntry
+  | OrgAddMemberAuditEntry
+  | OrgBlockUserAuditEntry
+  | OrgConfigDisableCollaboratorsOnlyAuditEntry
+  | OrgConfigEnableCollaboratorsOnlyAuditEntry
+  | OrgCreateAuditEntry
+  | OrgDisableOauthAppRestrictionsAuditEntry
+  | OrgDisableSamlAuditEntry
+  | OrgDisableTwoFactorRequirementAuditEntry
+  | OrgEnableOauthAppRestrictionsAuditEntry
+  | OrgEnableSamlAuditEntry
+  | OrgEnableTwoFactorRequirementAuditEntry
+  | OrgInviteMemberAuditEntry
+  | OrgInviteToBusinessAuditEntry
+  | OrgOauthAppAccessApprovedAuditEntry
+  | OrgOauthAppAccessBlockedAuditEntry
+  | OrgOauthAppAccessDeniedAuditEntry
+  | OrgOauthAppAccessRequestedAuditEntry
+  | OrgOauthAppAccessUnblockedAuditEntry
+  | OrgRemoveBillingManagerAuditEntry
+  | OrgRemoveMemberAuditEntry
+  | OrgRemoveOutsideCollaboratorAuditEntry
+  | OrgRestoreMemberAuditEntry
+  | OrgUnblockUserAuditEntry
+  | OrgUpdateDefaultRepositoryPermissionAuditEntry
+  | OrgUpdateMemberAuditEntry
+  | OrgUpdateMemberRepositoryCreationPermissionAuditEntry
+  | OrgUpdateMemberRepositoryInvitationPermissionAuditEntry
+  | PrivateRepositoryForkingDisableAuditEntry
+  | PrivateRepositoryForkingEnableAuditEntry
+  | RepoAccessAuditEntry
+  | RepoAddMemberAuditEntry
+  | RepoAddTopicAuditEntry
+  | RepoArchivedAuditEntry
+  | RepoChangeMergeSettingAuditEntry
+  | RepoConfigDisableAnonymousGitAccessAuditEntry
+  | RepoConfigDisableCollaboratorsOnlyAuditEntry
+  | RepoConfigDisableContributorsOnlyAuditEntry
+  | RepoConfigDisableSockpuppetDisallowedAuditEntry
+  | RepoConfigEnableAnonymousGitAccessAuditEntry
+  | RepoConfigEnableCollaboratorsOnlyAuditEntry
+  | RepoConfigEnableContributorsOnlyAuditEntry
+  | RepoConfigEnableSockpuppetDisallowedAuditEntry
+  | RepoConfigLockAnonymousGitAccessAuditEntry
+  | RepoConfigUnlockAnonymousGitAccessAuditEntry
+  | RepoCreateAuditEntry
+  | RepoDestroyAuditEntry
+  | RepoRemoveMemberAuditEntry
+  | RepoRemoveTopicAuditEntry
+  | RepositoryVisibilityChangeDisableAuditEntry
+  | RepositoryVisibilityChangeEnableAuditEntry
+  | TeamAddMemberAuditEntry
+  | TeamAddRepositoryAuditEntry
+  | TeamChangeParentTeamAuditEntry
+  | TeamRemoveMemberAuditEntry
+  | TeamRemoveRepositoryAuditEntry
+
+"""
+The connection type for OrganizationAuditEntry.
+"""
+type OrganizationAuditEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationAuditEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationAuditEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Metadata for an audit entry with action org.*
+"""
+interface OrganizationAuditEntryData {
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationAuditEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationAuditEntry
+}
+
+"""
+A list of organizations managed by an enterprise.
+"""
+type OrganizationConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Organization]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Organization
+}
+
+"""
+The connection type for User.
+"""
+type OrganizationEnterpriseOwnerConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationEnterpriseOwnerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An enterprise owner in the context of an organization that is part of the enterprise.
+"""
+type OrganizationEnterpriseOwnerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role of the owner with respect to the organization.
+  """
+  organizationRole: RoleInOrganization!
+}
+
+"""
+An Identity Provider configured to provision SAML and SCIM identities for
+Organizations. Visible to (1) organization owners, (2) organization owners'
+personal access tokens (classic) with read:org or admin:org scope, (3) GitHub
+App with an installation token with read or write access to members.
+"""
+type OrganizationIdentityProvider implements Node {
+  """
+  The digest algorithm used to sign SAML requests for the Identity Provider.
+  """
+  digestMethod: URI
+
+  """
+  External Identities provisioned by this Identity Provider
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the users login
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the users userName/NameID attribute
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the OrganizationIdentityProvider object
+  """
+  id: ID!
+
+  """
+  The x509 certificate used by the Identity Provider to sign assertions and responses.
+  """
+  idpCertificate: X509Certificate
+
+  """
+  The Issuer Entity ID for the SAML Identity Provider
+  """
+  issuer: String
+
+  """
+  Organization this Identity Provider belongs to
+  """
+  organization: Organization
+
+  """
+  The signature algorithm used to sign SAML requests for the Identity Provider.
+  """
+  signatureMethod: URI
+
+  """
+  The URL endpoint for the Identity Provider's SAML SSO.
+  """
+  ssoUrl: URI
+}
+
+"""
+An Invitation for a user to an organization.
+"""
+type OrganizationInvitation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email address of the user invited to the organization.
+  """
+  email: String
+
+  """
+  The Node ID of the OrganizationInvitation object
+  """
+  id: ID!
+
+  """
+  The source of the invitation.
+  """
+  invitationSource: OrganizationInvitationSource!
+
+  """
+  The type of invitation that was sent (e.g. email, user).
+  """
+  invitationType: OrganizationInvitationType!
+
+  """
+  The user who was invited to the organization.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User!
+
+  """
+  The organization the invite is for
+  """
+  organization: Organization!
+
+  """
+  The user's pending role in the organization (e.g. member, owner).
+  """
+  role: OrganizationInvitationRole!
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type OrganizationInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+The possible organization invitation roles.
+"""
+enum OrganizationInvitationRole {
+  """
+  The user is invited to be an admin of the organization.
+  """
+  ADMIN
+
+  """
+  The user is invited to be a billing manager of the organization.
+  """
+  BILLING_MANAGER
+
+  """
+  The user is invited to be a direct member of the organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  The user's previous role will be reinstated.
+  """
+  REINSTATE
+}
+
+"""
+The possible organization invitation sources.
+"""
+enum OrganizationInvitationSource {
+  """
+  The invitation was created from the web interface or from API
+  """
+  MEMBER
+
+  """
+  The invitation was created from SCIM
+  """
+  SCIM
+
+  """
+  The invitation was sent before this feature was added
+  """
+  UNKNOWN
+}
+
+"""
+The possible organization invitation types.
+"""
+enum OrganizationInvitationType {
+  """
+  The invitation was to an email address.
+  """
+  EMAIL
+
+  """
+  The invitation was to an existing user.
+  """
+  USER
+}
+
+"""
+The connection type for User.
+"""
+type OrganizationMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user within an organization.
+"""
+type OrganizationMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  Whether the organization member has two factor enabled or not. Returns null if information is not available to viewer.
+  """
+  hasTwoFactorEnabled: Boolean
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role this user has in the organization.
+  """
+  role: OrganizationMemberRole
+}
+
+"""
+The possible roles within an organization for its members.
+"""
+enum OrganizationMemberRole {
+  """
+  The user is an administrator of the organization.
+  """
+  ADMIN
+
+  """
+  The user is a member of the organization.
+  """
+  MEMBER
+}
+
+"""
+The possible values for the members can create repositories setting on an organization.
+"""
+enum OrganizationMembersCanCreateRepositoriesSettingValue {
+  """
+  Members will be able to create public and private repositories.
+  """
+  ALL
+
+  """
+  Members will not be able to create public or private repositories.
+  """
+  DISABLED
+
+  """
+  Members will be able to create only internal repositories.
+  """
+  INTERNAL
+
+  """
+  Members will be able to create only private repositories.
+  """
+  PRIVATE
+}
+
+"""
+A GitHub Enterprise Importer (GEI) organization migration.
+"""
+type OrganizationMigration implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the organization migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the OrganizationMigration object
+  """
+  id: ID!
+
+  """
+  The remaining amount of repos to be migrated.
+  """
+  remainingRepositoriesCount: Int
+
+  """
+  The name of the source organization to be migrated.
+  """
+  sourceOrgName: String!
+
+  """
+  The URL of the source organization to migrate.
+  """
+  sourceOrgUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: OrganizationMigrationState!
+
+  """
+  The name of the target organization.
+  """
+  targetOrgName: String!
+
+  """
+  The total amount of repositories to be migrated.
+  """
+  totalRepositoriesCount: Int
+}
+
+"""
+The Octoshift Organization migration state.
+"""
+enum OrganizationMigrationState {
+  """
+  The Octoshift migration has failed.
+  """
+  FAILED
+
+  """
+  The Octoshift migration has invalid credentials.
+  """
+  FAILED_VALIDATION
+
+  """
+  The Octoshift migration is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The Octoshift migration has not started.
+  """
+  NOT_STARTED
+
+  """
+  The Octoshift migration needs to have its credentials validated.
+  """
+  PENDING_VALIDATION
+
+  """
+  The Octoshift migration is performing post repository migrations.
+  """
+  POST_REPO_MIGRATION
+
+  """
+  The Octoshift migration is performing pre repository migrations.
+  """
+  PRE_REPO_MIGRATION
+
+  """
+  The Octoshift migration has been queued.
+  """
+  QUEUED
+
+  """
+  The Octoshift org migration is performing repository migrations.
+  """
+  REPO_MIGRATION
+
+  """
+  The Octoshift migration has succeeded.
+  """
+  SUCCEEDED
+}
+
+"""
+Used for argument of CreateProjectV2 mutation.
+"""
+union OrganizationOrUser = Organization | User
+
+"""
+Ordering options for organization connections.
+"""
+input OrganizationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order organizations by.
+  """
+  field: OrganizationOrderField!
+}
+
+"""
+Properties by which organization connections can be ordered.
+"""
+enum OrganizationOrderField {
+  """
+  Order organizations by creation time
+  """
+  CREATED_AT
+
+  """
+  Order organizations by login
+  """
+  LOGIN
+}
+
+"""
+An organization teams hovercard context
+"""
+type OrganizationTeamsHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Teams in this organization the user is a member of that are relevant
+  """
+  relevantTeams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  The path for the full team list for this user
+  """
+  teamsResourcePath: URI!
+
+  """
+  The URL for the full team list for this user
+  """
+  teamsUrl: URI!
+
+  """
+  The total number of teams the user is on in the organization
+  """
+  totalTeamCount: Int!
+}
+
+"""
+An organization list hovercard context
+"""
+type OrganizationsHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Organizations this user is a member of that are relevant
+  """
+  relevantOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the User's organizations.
+    """
+    orderBy: OrganizationOrder = null
+  ): OrganizationConnection!
+
+  """
+  The total number of organizations this user is in
+  """
+  totalOrganizationCount: Int!
+}
+
+"""
+Information for an uploaded package.
+"""
+type Package implements Node {
+  """
+  The Node ID of the Package object
+  """
+  id: ID!
+
+  """
+  Find the latest version for the package.
+  """
+  latestVersion: PackageVersion
+
+  """
+  Identifies the name of the package.
+  """
+  name: String!
+
+  """
+  Identifies the type of the package.
+  """
+  packageType: PackageType!
+
+  """
+  The repository this package belongs to.
+  """
+  repository: Repository
+
+  """
+  Statistics about package activity.
+  """
+  statistics: PackageStatistics
+
+  """
+  Find package version by version string.
+  """
+  version(
+    """
+    The package version.
+    """
+    version: String!
+  ): PackageVersion
+
+  """
+  list of versions for this package
+  """
+  versions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageVersionOrder = {field: CREATED_AT, direction: DESC}
+  ): PackageVersionConnection!
+}
+
+"""
+The connection type for Package.
+"""
+type PackageConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Package]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Package
+}
+
+"""
+A file in a package version.
+"""
+type PackageFile implements Node {
+  """
+  The Node ID of the PackageFile object
+  """
+  id: ID!
+
+  """
+  MD5 hash of the file.
+  """
+  md5: String
+
+  """
+  Name of the file.
+  """
+  name: String!
+
+  """
+  The package version this file belongs to.
+  """
+  packageVersion: PackageVersion
+
+  """
+  SHA1 hash of the file.
+  """
+  sha1: String
+
+  """
+  SHA256 hash of the file.
+  """
+  sha256: String
+
+  """
+  Size of the file in bytes.
+  """
+  size: Int
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  URL to download the asset.
+  """
+  url: URI
+}
+
+"""
+The connection type for PackageFile.
+"""
+type PackageFileConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageFileEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PackageFile]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageFileEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PackageFile
+}
+
+"""
+Ways in which lists of package files can be ordered upon return.
+"""
+input PackageFileOrder {
+  """
+  The direction in which to order package files by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field in which to order package files by.
+  """
+  field: PackageFileOrderField
+}
+
+"""
+Properties by which package file connections can be ordered.
+"""
+enum PackageFileOrderField {
+  """
+  Order package files by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Ways in which lists of packages can be ordered upon return.
+"""
+input PackageOrder {
+  """
+  The direction in which to order packages by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field in which to order packages by.
+  """
+  field: PackageOrderField
+}
+
+"""
+Properties by which package connections can be ordered.
+"""
+enum PackageOrderField {
+  """
+  Order packages by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Represents an owner of a package.
+"""
+interface PackageOwner {
+  """
+  The Node ID of the PackageOwner object
+  """
+  id: ID!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+}
+
+"""
+Represents an object that contains package activity statistics such as downloads.
+"""
+type PackageStatistics {
+  """
+  Number of times the package was downloaded since it was created.
+  """
+  downloadsTotalCount: Int!
+}
+
+"""
+A version tag contains the mapping between a tag name and a version.
+"""
+type PackageTag implements Node {
+  """
+  The Node ID of the PackageTag object
+  """
+  id: ID!
+
+  """
+  Identifies the tag name of the version.
+  """
+  name: String!
+
+  """
+  Version that the tag is associated with.
+  """
+  version: PackageVersion
+}
+
+"""
+The possible types of a package.
+"""
+enum PackageType {
+  """
+  A debian package.
+  """
+  DEBIAN
+
+  """
+  A docker image.
+  """
+  DOCKER
+    @deprecated(
+      reason: "DOCKER will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2021-06-21 UTC."
+    )
+
+  """
+  A maven package.
+  """
+  MAVEN
+    @deprecated(
+      reason: "MAVEN will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2023-02-10 UTC."
+    )
+
+  """
+  An npm package.
+  """
+  NPM
+    @deprecated(
+      reason: "NPM will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-11-21 UTC."
+    )
+
+  """
+  A nuget package.
+  """
+  NUGET
+    @deprecated(
+      reason: "NUGET will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-11-21 UTC."
+    )
+
+  """
+  A python package.
+  """
+  PYPI
+
+  """
+  A rubygems package.
+  """
+  RUBYGEMS
+    @deprecated(
+      reason: "RUBYGEMS will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-12-28 UTC."
+    )
+}
+
+"""
+Information about a specific package version.
+"""
+type PackageVersion implements Node {
+  """
+  List of files associated with this package version
+  """
+  files(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering of the returned package files.
+    """
+    orderBy: PackageFileOrder = {field: CREATED_AT, direction: ASC}
+  ): PackageFileConnection!
+
+  """
+  The Node ID of the PackageVersion object
+  """
+  id: ID!
+
+  """
+  The package associated with this version.
+  """
+  package: Package
+
+  """
+  The platform this version was built for.
+  """
+  platform: String
+
+  """
+  Whether or not this version is a pre-release.
+  """
+  preRelease: Boolean!
+
+  """
+  The README of this package version.
+  """
+  readme: String
+
+  """
+  The release associated with this package version.
+  """
+  release: Release
+
+  """
+  Statistics about package activity.
+  """
+  statistics: PackageVersionStatistics
+
+  """
+  The package version summary.
+  """
+  summary: String
+
+  """
+  The version string.
+  """
+  version: String!
+}
+
+"""
+The connection type for PackageVersion.
+"""
+type PackageVersionConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageVersionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PackageVersion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageVersionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PackageVersion
+}
+
+"""
+Ways in which lists of package versions can be ordered upon return.
+"""
+input PackageVersionOrder {
+  """
+  The direction in which to order package versions by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field in which to order package versions by.
+  """
+  field: PackageVersionOrderField
+}
+
+"""
+Properties by which package version connections can be ordered.
+"""
+enum PackageVersionOrderField {
+  """
+  Order package versions by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Represents an object that contains package version activity statistics such as downloads.
+"""
+type PackageVersionStatistics {
+  """
+  Number of times the package was downloaded since it was created.
+  """
+  downloadsTotalCount: Int!
+}
+
+"""
+Information about pagination in a connection.
+"""
+type PageInfo {
+  """
+  When paginating forwards, the cursor to continue.
+  """
+  endCursor: String
+
+  """
+  When paginating forwards, are there more items?
+  """
+  hasNextPage: Boolean!
+
+  """
+  When paginating backwards, are there more items?
+  """
+  hasPreviousPage: Boolean!
+
+  """
+  When paginating backwards, the cursor to continue.
+  """
+  startCursor: String
+}
+
+"""
+The possible types of patch statuses.
+"""
+enum PatchStatus {
+  """
+  The file was added. Git status 'A'.
+  """
+  ADDED
+
+  """
+  The file's type was changed. Git status 'T'.
+  """
+  CHANGED
+
+  """
+  The file was copied. Git status 'C'.
+  """
+  COPIED
+
+  """
+  The file was deleted. Git status 'D'.
+  """
+  DELETED
+
+  """
+  The file's contents were changed. Git status 'M'.
+  """
+  MODIFIED
+
+  """
+  The file was renamed. Git status 'R'.
+  """
+  RENAMED
+}
+
+"""
+Types that can grant permissions on a repository to a user
+"""
+union PermissionGranter = Organization | Repository | Team
+
+"""
+A level of permission and source for a user's access to a repository.
+"""
+type PermissionSource {
+  """
+  The organization the repository belongs to.
+  """
+  organization: Organization!
+
+  """
+  The level of access this source has granted to the user.
+  """
+  permission: DefaultRepositoryPermissionField!
+
+  """
+  The name of the role this source has granted to the user.
+  """
+  roleName: String
+
+  """
+  The source of this permission.
+  """
+  source: PermissionGranter!
+}
+
+"""
+Autogenerated input type of PinIssue
+"""
+input PinIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to be pinned
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of PinIssue
+"""
+type PinIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was pinned
+  """
+  issue: Issue
+}
+
+"""
+Types that can be pinned to a profile page.
+"""
+union PinnableItem = Gist | Repository
+
+"""
+The connection type for PinnableItem.
+"""
+type PinnableItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnableItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnableItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnableItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnableItem
+}
+
+"""
+Represents items that can be pinned to a profile page or dashboard.
+"""
+enum PinnableItemType {
+  """
+  A gist.
+  """
+  GIST
+
+  """
+  An issue.
+  """
+  ISSUE
+
+  """
+  An organization.
+  """
+  ORGANIZATION
+
+  """
+  A project.
+  """
+  PROJECT
+
+  """
+  A pull request.
+  """
+  PULL_REQUEST
+
+  """
+  A repository.
+  """
+  REPOSITORY
+
+  """
+  A team.
+  """
+  TEAM
+
+  """
+  A user.
+  """
+  USER
+}
+
+"""
+A Pinned Discussion is a discussion pinned to a repository's index page.
+"""
+type PinnedDiscussion implements Node & RepositoryNode {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The discussion that was pinned.
+  """
+  discussion: Discussion!
+
+  """
+  Color stops of the chosen gradient
+  """
+  gradientStopColors: [String!]!
+
+  """
+  The Node ID of the PinnedDiscussion object
+  """
+  id: ID!
+
+  """
+  Background texture pattern
+  """
+  pattern: PinnedDiscussionPattern!
+
+  """
+  The actor that pinned this discussion.
+  """
+  pinnedBy: Actor!
+
+  """
+  Preconfigured background gradient option
+  """
+  preconfiguredGradient: PinnedDiscussionGradient
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for PinnedDiscussion.
+"""
+type PinnedDiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnedDiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnedDiscussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnedDiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnedDiscussion
+}
+
+"""
+Preconfigured gradients that may be used to style discussions pinned within a repository.
+"""
+enum PinnedDiscussionGradient {
+  """
+  A gradient of blue to mint
+  """
+  BLUE_MINT
+
+  """
+  A gradient of blue to purple
+  """
+  BLUE_PURPLE
+
+  """
+  A gradient of pink to blue
+  """
+  PINK_BLUE
+
+  """
+  A gradient of purple to coral
+  """
+  PURPLE_CORAL
+
+  """
+  A gradient of red to orange
+  """
+  RED_ORANGE
+}
+
+"""
+Preconfigured background patterns that may be used to style discussions pinned within a repository.
+"""
+enum PinnedDiscussionPattern {
+  """
+  An upward-facing chevron pattern
+  """
+  CHEVRON_UP
+
+  """
+  A hollow dot pattern
+  """
+  DOT
+
+  """
+  A solid dot pattern
+  """
+  DOT_FILL
+
+  """
+  A heart pattern
+  """
+  HEART_FILL
+
+  """
+  A plus sign pattern
+  """
+  PLUS
+
+  """
+  A lightning bolt pattern
+  """
+  ZAP
+}
+
+"""
+Represents a 'pinned' event on a given issue or pull request.
+"""
+type PinnedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the PinnedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+A Pinned Issue is a issue pinned to a repository's index page.
+"""
+type PinnedIssue implements Node {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The Node ID of the PinnedIssue object
+  """
+  id: ID!
+
+  """
+  The issue that was pinned.
+  """
+  issue: Issue!
+
+  """
+  The actor that pinned this issue.
+  """
+  pinnedBy: Actor!
+
+  """
+  The repository that this issue was pinned to.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for PinnedIssue.
+"""
+type PinnedIssueConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnedIssueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnedIssue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnedIssueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnedIssue
+}
+
+"""
+An ISO-8601 encoded UTC date string with millisecond precision.
+"""
+scalar PreciseDateTime
+
+"""
+Audit log entry for a private_repository_forking.disable event.
+"""
+type PrivateRepositoryForkingDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the PrivateRepositoryForkingDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a private_repository_forking.enable event.
+"""
+type PrivateRepositoryForkingEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the PrivateRepositoryForkingEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+A curatable list of repositories relating to a repository owner, which defaults
+to showing the most popular repositories they own.
+"""
+type ProfileItemShowcase {
+  """
+  Whether or not the owner has pinned any repositories or gists.
+  """
+  hasPinnedItems: Boolean!
+
+  """
+  The repositories and gists in the showcase. If the profile owner has any
+  pinned items, those will be returned. Otherwise, the profile owner's popular
+  repositories will be returned.
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnableItemConnection!
+}
+
+"""
+Represents any entity on GitHub that has a profile page.
+"""
+interface ProfileOwner {
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  The public profile email.
+  """
+  email: String
+
+  """
+  The Node ID of the ProfileOwner object
+  """
+  id: ID!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  either curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The public profile location.
+  """
+  location: String
+
+  """
+  The username used to login.
+  """
+  login: String!
+
+  """
+  The public profile name.
+  """
+  name: String
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  The public profile website URL.
+  """
+  websiteUrl: URI
+}
+
+"""
+Projects manage issues, pull requests and notes within a project owner.
+"""
+type Project implements Closable & Node & Updatable {
+  """
+  The project's description body.
+  """
+  body: String
+
+  """
+  The projects description body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  List of columns in the project
+  """
+  columns(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectColumnConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who originally created the project.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Project object
+  """
+  id: ID!
+
+  """
+  The project's name.
+  """
+  name: String!
+
+  """
+  The project's number.
+  """
+  number: Int!
+
+  """
+  The project's owner. Currently limited to repositories, organizations, and users.
+  """
+  owner: ProjectOwner!
+
+  """
+  List of pending cards in this project
+  """
+  pendingCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  Project progress details.
+  """
+  progress: ProjectProgress!
+
+  """
+  The HTTP path for this project
+  """
+  resourcePath: URI!
+
+  """
+  Whether the project is open or closed.
+  """
+  state: ProjectState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project
+  """
+  url: URI!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+}
+
+"""
+A card in a project.
+"""
+type ProjectCard implements Node {
+  """
+  The project column this card is associated under. A card may only belong to one
+  project column at a time. The column field will be null if the card is created
+  in a pending state and has yet to be associated with a column. Once cards are
+  associated with a column, they will not become pending in the future.
+  """
+  column: ProjectColumn
+
+  """
+  The card content item
+  """
+  content: ProjectCardItem
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this card
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectCard object
+  """
+  id: ID!
+
+  """
+  Whether the card is archived
+  """
+  isArchived: Boolean!
+
+  """
+  The card note
+  """
+  note: String
+
+  """
+  The project that contains this card.
+  """
+  project: Project!
+
+  """
+  The HTTP path for this card
+  """
+  resourcePath: URI!
+
+  """
+  The state of ProjectCard
+  """
+  state: ProjectCardState
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this card
+  """
+  url: URI!
+}
+
+"""
+The possible archived states of a project card.
+"""
+enum ProjectCardArchivedState {
+  """
+  A project card that is archived
+  """
+  ARCHIVED
+
+  """
+  A project card that is not archived
+  """
+  NOT_ARCHIVED
+}
+
+"""
+The connection type for ProjectCard.
+"""
+type ProjectCardConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectCardEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectCard]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectCardEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectCard
+}
+
+"""
+An issue or PR and its owning repository to be used in a project card.
+"""
+input ProjectCardImport {
+  """
+  The issue or pull request number.
+  """
+  number: Int!
+
+  """
+  Repository name with owner (owner/repository).
+  """
+  repository: String!
+}
+
+"""
+Types that can be inside Project Cards.
+"""
+union ProjectCardItem = Issue | PullRequest
+
+"""
+Various content states of a ProjectCard
+"""
+enum ProjectCardState {
+  """
+  The card has content only.
+  """
+  CONTENT_ONLY
+
+  """
+  The card has a note only.
+  """
+  NOTE_ONLY
+
+  """
+  The card is redacted.
+  """
+  REDACTED
+}
+
+"""
+A column inside a project.
+"""
+type ProjectColumn implements Node {
+  """
+  List of cards in the column
+  """
+  cards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectColumn object
+  """
+  id: ID!
+
+  """
+  The project column's name.
+  """
+  name: String!
+
+  """
+  The project that contains this column.
+  """
+  project: Project!
+
+  """
+  The semantic purpose of the column
+  """
+  purpose: ProjectColumnPurpose
+
+  """
+  The HTTP path for this project column
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project column
+  """
+  url: URI!
+}
+
+"""
+The connection type for ProjectColumn.
+"""
+type ProjectColumnConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectColumnEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectColumn]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectColumnEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectColumn
+}
+
+"""
+A project column and a list of its issues and PRs.
+"""
+input ProjectColumnImport {
+  """
+  The name of the column.
+  """
+  columnName: String!
+
+  """
+  A list of issues and pull requests in the column.
+  """
+  issues: [ProjectCardImport!]
+
+  """
+  The position of the column, starting from 0.
+  """
+  position: Int!
+}
+
+"""
+The semantic purpose of the column - todo, in progress, or done.
+"""
+enum ProjectColumnPurpose {
+  """
+  The column contains cards which are complete
+  """
+  DONE
+
+  """
+  The column contains cards which are currently being worked on
+  """
+  IN_PROGRESS
+
+  """
+  The column contains cards still to be worked on
+  """
+  TODO
+}
+
+"""
+A list of projects associated with the owner.
+"""
+type ProjectConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Project]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Project
+}
+
+"""
+Ways in which lists of projects can be ordered upon return.
+"""
+input ProjectOrder {
+  """
+  The direction in which to order projects by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order projects by.
+  """
+  field: ProjectOrderField!
+}
+
+"""
+Properties by which project connections can be ordered.
+"""
+enum ProjectOrderField {
+  """
+  Order projects by creation time
+  """
+  CREATED_AT
+
+  """
+  Order projects by name
+  """
+  NAME
+
+  """
+  Order projects by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Represents an owner of a Project.
+"""
+interface ProjectOwner {
+  """
+  The Node ID of the ProjectOwner object
+  """
+  id: ID!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing owners projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing owners projects
+  """
+  projectsUrl: URI!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+}
+
+"""
+Project progress stats.
+"""
+type ProjectProgress {
+  """
+  The number of done cards.
+  """
+  doneCount: Int!
+
+  """
+  The percentage of done cards.
+  """
+  donePercentage: Float!
+
+  """
+  Whether progress tracking is enabled and cards with purpose exist for this project
+  """
+  enabled: Boolean!
+
+  """
+  The number of in-progress cards.
+  """
+  inProgressCount: Int!
+
+  """
+  The percentage of in-progress cards.
+  """
+  inProgressPercentage: Float!
+
+  """
+  The number of to do cards.
+  """
+  todoCount: Int!
+
+  """
+  The percentage of to do cards.
+  """
+  todoPercentage: Float!
+}
+
+"""
+State of the project; either 'open' or 'closed'
+"""
+enum ProjectState {
+  """
+  The project is closed.
+  """
+  CLOSED
+
+  """
+  The project is open.
+  """
+  OPEN
+}
+
+"""
+GitHub-provided templates for Projects
+"""
+enum ProjectTemplate {
+  """
+  Create a board with v2 triggers to automatically move cards across To do, In progress and Done columns.
+  """
+  AUTOMATED_KANBAN_V2
+
+  """
+  Create a board with triggers to automatically move cards across columns with review automation.
+  """
+  AUTOMATED_REVIEWS_KANBAN
+
+  """
+  Create a board with columns for To do, In progress and Done.
+  """
+  BASIC_KANBAN
+
+  """
+  Create a board to triage and prioritize bugs with To do, priority, and Done columns.
+  """
+  BUG_TRIAGE
+}
+
+"""
+New projects that manage issues, pull requests and drafts using tables and boards.
+"""
+type ProjectV2 implements Closable & Node & Updatable {
+  """
+  Returns true if the project is closed.
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who originally created the project.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  A field of the project
+  """
+  field(
+    """
+    The name of the field
+    """
+    name: String!
+  ): ProjectV2FieldConfiguration
+
+  """
+  List of fields and their constraints in the project
+  """
+  fields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 fields returned from the connection
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection!
+
+  """
+  The Node ID of the ProjectV2 object
+  """
+  id: ID!
+
+  """
+  List of items in the project
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 items returned from the connection
+    """
+    orderBy: ProjectV2ItemOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ItemConnection!
+
+  """
+  The project's number.
+  """
+  number: Int!
+
+  """
+  The project's owner. Currently limited to organizations and users.
+  """
+  owner: ProjectV2Owner!
+
+  """
+  Returns true if the project is public.
+  """
+  public: Boolean!
+
+  """
+  The project's readme.
+  """
+  readme: String
+
+  """
+  The repositories the project is linked to.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder = {field: CREATED_AT, direction: DESC}
+  ): RepositoryConnection!
+
+  """
+  The HTTP path for this project
+  """
+  resourcePath: URI!
+
+  """
+  The project's short description.
+  """
+  shortDescription: String
+
+  """
+  The teams the project is linked to.
+  """
+  teams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for teams returned from this connection.
+    """
+    orderBy: TeamOrder = {field: NAME, direction: ASC}
+  ): TeamConnection!
+
+  """
+  Returns true if this project is a template.
+  """
+  template: Boolean!
+
+  """
+  The project's name.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project
+  """
+  url: URI!
+
+  """
+  A view of the project
+  """
+  view(
+    """
+    The number of a view belonging to the project
+    """
+    number: Int!
+  ): ProjectV2View
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  List of views in the project
+  """
+  views(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 views returned from the connection
+    """
+    orderBy: ProjectV2ViewOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ViewConnection!
+
+  """
+  A workflow of the project
+  """
+  workflow(
+    """
+    The number of a workflow belonging to the project
+    """
+    number: Int!
+  ): ProjectV2Workflow
+
+  """
+  List of the workflows in the project
+  """
+  workflows(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 workflows returned from the connection
+    """
+    orderBy: ProjectV2WorkflowOrder = {field: NAME, direction: ASC}
+  ): ProjectV2WorkflowConnection!
+}
+
+"""
+Possible collaborators for a project.
+"""
+union ProjectV2Actor = Team | User
+
+"""
+The connection type for ProjectV2Actor.
+"""
+type ProjectV2ActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Actor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Actor
+}
+
+"""
+A collaborator to update on a project. Only one of the userId or teamId should be provided.
+"""
+input ProjectV2Collaborator {
+  """
+  The role to grant the collaborator
+  """
+  role: ProjectV2Roles!
+
+  """
+  The ID of the team as a collaborator.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The ID of the user as a collaborator.
+  """
+  userId: ID @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+The connection type for ProjectV2.
+"""
+type ProjectV2Connection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2Edge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The type of a project field.
+"""
+enum ProjectV2CustomFieldType {
+  """
+  Date
+  """
+  DATE
+
+  """
+  Number
+  """
+  NUMBER
+
+  """
+  Single Select
+  """
+  SINGLE_SELECT
+
+  """
+  Text
+  """
+  TEXT
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2Edge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2
+}
+
+"""
+A field inside a project.
+"""
+type ProjectV2Field implements Node & ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2Field object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Common fields across different project field types
+"""
+interface ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2FieldCommon object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Configurations for project fields.
+"""
+union ProjectV2FieldConfiguration = ProjectV2Field | ProjectV2IterationField | ProjectV2SingleSelectField
+
+"""
+The connection type for ProjectV2FieldConfiguration.
+"""
+type ProjectV2FieldConfigurationConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2FieldConfigurationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2FieldConfiguration]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2FieldConfigurationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2FieldConfiguration
+}
+
+"""
+The connection type for ProjectV2Field.
+"""
+type ProjectV2FieldConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2FieldEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Field]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2FieldEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Field
+}
+
+"""
+Ordering options for project v2 field connections
+"""
+input ProjectV2FieldOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 fields by.
+  """
+  field: ProjectV2FieldOrderField!
+}
+
+"""
+Properties by which project v2 field connections can be ordered.
+"""
+enum ProjectV2FieldOrderField {
+  """
+  Order project v2 fields by creation time
+  """
+  CREATED_AT
+
+  """
+  Order project v2 fields by name
+  """
+  NAME
+
+  """
+  Order project v2 fields by position
+  """
+  POSITION
+}
+
+"""
+The type of a project field.
+"""
+enum ProjectV2FieldType {
+  """
+  Assignees
+  """
+  ASSIGNEES
+
+  """
+  Date
+  """
+  DATE
+
+  """
+  Iteration
+  """
+  ITERATION
+
+  """
+  Labels
+  """
+  LABELS
+
+  """
+  Linked Pull Requests
+  """
+  LINKED_PULL_REQUESTS
+
+  """
+  Milestone
+  """
+  MILESTONE
+
+  """
+  Number
+  """
+  NUMBER
+
+  """
+  Repository
+  """
+  REPOSITORY
+
+  """
+  Reviewers
+  """
+  REVIEWERS
+
+  """
+  Single Select
+  """
+  SINGLE_SELECT
+
+  """
+  Text
+  """
+  TEXT
+
+  """
+  Title
+  """
+  TITLE
+
+  """
+  Tracked by
+  """
+  TRACKED_BY
+
+  """
+  Tracks
+  """
+  TRACKS
+}
+
+"""
+The values that can be used to update a field of an item inside a Project. Only 1 value can be updated at a time.
+"""
+input ProjectV2FieldValue {
+  """
+  The ISO 8601 date to set on the field.
+  """
+  date: Date
+
+  """
+  The id of the iteration to set on the field.
+  """
+  iterationId: String
+
+  """
+  The number to set on the field.
+  """
+  number: Float
+
+  """
+  The id of the single select option to set on the field.
+  """
+  singleSelectOptionId: String
+
+  """
+  The text to set on the field.
+  """
+  text: String
+}
+
+"""
+Ways in which to filter lists of projects.
+"""
+input ProjectV2Filters {
+  """
+  List project v2 filtered by the state given.
+  """
+  state: ProjectV2State
+}
+
+"""
+An item within a Project.
+"""
+type ProjectV2Item implements Node {
+  """
+  The content of the referenced draft issue, issue, or pull request
+  """
+  content: ProjectV2ItemContent
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The field value of the first project field which matches the 'name' argument that is set on the item.
+  """
+  fieldValueByName(
+    """
+    The name of the field to return the field value of
+    """
+    name: String!
+  ): ProjectV2ItemFieldValue
+
+  """
+  The field values that are set on the item.
+  """
+  fieldValues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 item field values returned from the connection
+    """
+    orderBy: ProjectV2ItemFieldValueOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ItemFieldValueConnection!
+
+  """
+  The Node ID of the ProjectV2Item object
+  """
+  id: ID!
+
+  """
+  Whether the item is archived.
+  """
+  isArchived: Boolean!
+
+  """
+  The project that contains this item.
+  """
+  project: ProjectV2!
+
+  """
+  The type of the item.
+  """
+  type: ProjectV2ItemType!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2Item.
+"""
+type ProjectV2ItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Item]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Types that can be inside Project Items.
+"""
+union ProjectV2ItemContent = DraftIssue | Issue | PullRequest
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Item
+}
+
+"""
+The value of a date field in a Project item.
+"""
+type ProjectV2ItemFieldDateValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Date value for the field
+  """
+  date: Date
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldDateValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of an iteration field in a Project item.
+"""
+type ProjectV2ItemFieldIterationValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The duration of the iteration in days.
+  """
+  duration: Int!
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldIterationValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  The ID of the iteration.
+  """
+  iterationId: String!
+
+  """
+  The start date of the iteration.
+  """
+  startDate: Date!
+
+  """
+  The title of the iteration.
+  """
+  title: String!
+
+  """
+  The title of the iteration, with HTML.
+  """
+  titleHTML: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of the labels field in a Project item.
+"""
+type ProjectV2ItemFieldLabelValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  Labels value of a field
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): LabelConnection
+}
+
+"""
+The value of a milestone field in a Project item.
+"""
+type ProjectV2ItemFieldMilestoneValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  Milestone value of a field
+  """
+  milestone: Milestone
+}
+
+"""
+The value of a number field in a Project item.
+"""
+type ProjectV2ItemFieldNumberValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldNumberValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Number as a float(8)
+  """
+  number: Float
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a pull request field in a Project item.
+"""
+type ProjectV2ItemFieldPullRequestValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The pull requests for this field
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests.
+    """
+    orderBy: PullRequestOrder = {field: CREATED_AT, direction: ASC}
+  ): PullRequestConnection
+}
+
+"""
+The value of a repository field in a Project item.
+"""
+type ProjectV2ItemFieldRepositoryValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The repository for this field.
+  """
+  repository: Repository
+}
+
+"""
+The value of a reviewers field in a Project item.
+"""
+type ProjectV2ItemFieldReviewerValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The reviewers for this field.
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RequestedReviewerConnection
+}
+
+"""
+The value of a single select field in a Project item.
+"""
+type ProjectV2ItemFieldSingleSelectValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  The color applied to the selected single-select option.
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  A plain-text description of the selected single-select option, such as what the option means.
+  """
+  description: String
+
+  """
+  The description of the selected single-select option, including HTML tags.
+  """
+  descriptionHTML: String
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldSingleSelectValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  The name of the selected single select option.
+  """
+  name: String
+
+  """
+  The html name of the selected single select option.
+  """
+  nameHTML: String
+
+  """
+  The id of the selected single select option.
+  """
+  optionId: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a text field in a Project item.
+"""
+type ProjectV2ItemFieldTextValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldTextValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Text value of a field
+  """
+  text: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a user field in a Project item.
+"""
+type ProjectV2ItemFieldUserValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The users for this field
+  """
+  users(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection
+}
+
+"""
+Project field values
+"""
+union ProjectV2ItemFieldValue =
+    ProjectV2ItemFieldDateValue
+  | ProjectV2ItemFieldIterationValue
+  | ProjectV2ItemFieldLabelValue
+  | ProjectV2ItemFieldMilestoneValue
+  | ProjectV2ItemFieldNumberValue
+  | ProjectV2ItemFieldPullRequestValue
+  | ProjectV2ItemFieldRepositoryValue
+  | ProjectV2ItemFieldReviewerValue
+  | ProjectV2ItemFieldSingleSelectValue
+  | ProjectV2ItemFieldTextValue
+  | ProjectV2ItemFieldUserValue
+
+"""
+Common fields across different project field value types
+"""
+interface ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldValueCommon object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2ItemFieldValue.
+"""
+type ProjectV2ItemFieldValueConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ItemFieldValueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2ItemFieldValue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ItemFieldValueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2ItemFieldValue
+}
+
+"""
+Ordering options for project v2 item field value connections
+"""
+input ProjectV2ItemFieldValueOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 item field values by.
+  """
+  field: ProjectV2ItemFieldValueOrderField!
+}
+
+"""
+Properties by which project v2 item field value connections can be ordered.
+"""
+enum ProjectV2ItemFieldValueOrderField {
+  """
+  Order project v2 item field values by their position in the project
+  """
+  POSITION
+}
+
+"""
+Ordering options for project v2 item connections
+"""
+input ProjectV2ItemOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 items by.
+  """
+  field: ProjectV2ItemOrderField!
+}
+
+"""
+Properties by which project v2 item connections can be ordered.
+"""
+enum ProjectV2ItemOrderField {
+  """
+  Order project v2 items by their position in the project
+  """
+  POSITION
+}
+
+"""
+The type of a project item.
+"""
+enum ProjectV2ItemType {
+  """
+  Draft Issue
+  """
+  DRAFT_ISSUE
+
+  """
+  Issue
+  """
+  ISSUE
+
+  """
+  Pull Request
+  """
+  PULL_REQUEST
+
+  """
+  Redacted Item
+  """
+  REDACTED
+}
+
+"""
+An iteration field inside a project.
+"""
+type ProjectV2IterationField implements Node & ProjectV2FieldCommon {
+  """
+  Iteration configuration settings
+  """
+  configuration: ProjectV2IterationFieldConfiguration!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2IterationField object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Iteration field configuration for a project.
+"""
+type ProjectV2IterationFieldConfiguration {
+  """
+  The iteration's completed iterations
+  """
+  completedIterations: [ProjectV2IterationFieldIteration!]!
+
+  """
+  The iteration's duration in days
+  """
+  duration: Int!
+
+  """
+  The iteration's iterations
+  """
+  iterations: [ProjectV2IterationFieldIteration!]!
+
+  """
+  The iteration's start day of the week
+  """
+  startDay: Int!
+}
+
+"""
+Iteration field iteration settings for a project.
+"""
+type ProjectV2IterationFieldIteration {
+  """
+  The iteration's duration in days
+  """
+  duration: Int!
+
+  """
+  The iteration's ID.
+  """
+  id: String!
+
+  """
+  The iteration's start date
+  """
+  startDate: Date!
+
+  """
+  The iteration's title.
+  """
+  title: String!
+
+  """
+  The iteration's html title.
+  """
+  titleHTML: String!
+}
+
+"""
+Ways in which lists of projects can be ordered upon return.
+"""
+input ProjectV2Order {
+  """
+  The direction in which to order projects by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order projects by.
+  """
+  field: ProjectV2OrderField!
+}
+
+"""
+Properties by which projects can be ordered.
+"""
+enum ProjectV2OrderField {
+  """
+  The project's date and time of creation
+  """
+  CREATED_AT
+
+  """
+  The project's number
+  """
+  NUMBER
+
+  """
+  The project's title
+  """
+  TITLE
+
+  """
+  The project's date and time of update
+  """
+  UPDATED_AT
+}
+
+"""
+Represents an owner of a project (beta).
+"""
+interface ProjectV2Owner {
+  """
+  The Node ID of the ProjectV2Owner object
+  """
+  id: ID!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+}
+
+"""
+Recent projects for the owner.
+"""
+interface ProjectV2Recent {
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+}
+
+"""
+The possible roles of a collaborator on a project.
+"""
+enum ProjectV2Roles {
+  """
+  The collaborator can view, edit, and manage the settings of the project
+  """
+  ADMIN
+
+  """
+  The collaborator has no direct access to the project
+  """
+  NONE
+
+  """
+  The collaborator can view the project
+  """
+  READER
+
+  """
+  The collaborator can view and edit the project
+  """
+  WRITER
+}
+
+"""
+A single select field inside a project.
+"""
+type ProjectV2SingleSelectField implements Node & ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2SingleSelectField object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  Options for the single select field
+  """
+  options(
+    """
+    Filter returned options to only those matching these names, case insensitive.
+    """
+    names: [String!]
+  ): [ProjectV2SingleSelectFieldOption!]!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Single select field option for a configuration for a project.
+"""
+type ProjectV2SingleSelectFieldOption {
+  """
+  The option's display color.
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  The option's plain-text description.
+  """
+  description: String!
+
+  """
+  The option's description, possibly containing HTML.
+  """
+  descriptionHTML: String!
+
+  """
+  The option's ID.
+  """
+  id: String!
+
+  """
+  The option's name.
+  """
+  name: String!
+
+  """
+  The option's html name.
+  """
+  nameHTML: String!
+}
+
+"""
+The display color of a single-select field option.
+"""
+enum ProjectV2SingleSelectFieldOptionColor {
+  """
+  BLUE
+  """
+  BLUE
+
+  """
+  GRAY
+  """
+  GRAY
+
+  """
+  GREEN
+  """
+  GREEN
+
+  """
+  ORANGE
+  """
+  ORANGE
+
+  """
+  PINK
+  """
+  PINK
+
+  """
+  PURPLE
+  """
+  PURPLE
+
+  """
+  RED
+  """
+  RED
+
+  """
+  YELLOW
+  """
+  YELLOW
+}
+
+"""
+Represents a single select field option
+"""
+input ProjectV2SingleSelectFieldOptionInput {
+  """
+  The display color of the option
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  The description text of the option
+  """
+  description: String!
+
+  """
+  The name of the option
+  """
+  name: String!
+}
+
+"""
+Represents a sort by field and direction.
+"""
+type ProjectV2SortBy {
+  """
+  The direction of the sorting. Possible values are ASC and DESC.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which items are sorted.
+  """
+  field: ProjectV2Field!
+}
+
+"""
+The connection type for ProjectV2SortBy.
+"""
+type ProjectV2SortByConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2SortByEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2SortBy]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2SortByEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2SortBy
+}
+
+"""
+Represents a sort by field and direction.
+"""
+type ProjectV2SortByField {
+  """
+  The direction of the sorting. Possible values are ASC and DESC.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which items are sorted.
+  """
+  field: ProjectV2FieldConfiguration!
+}
+
+"""
+The connection type for ProjectV2SortByField.
+"""
+type ProjectV2SortByFieldConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2SortByFieldEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2SortByField]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2SortByFieldEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2SortByField
+}
+
+"""
+The possible states of a project v2.
+"""
+enum ProjectV2State {
+  """
+  A project v2 that has been closed
+  """
+  CLOSED
+
+  """
+  A project v2 that is still open
+  """
+  OPEN
+}
+
+"""
+A view within a ProjectV2.
+"""
+type ProjectV2View implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The view's visible fields.
+  """
+  fields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The project view's filter.
+  """
+  filter: String
+
+  """
+  The view's group-by field.
+  """
+  groupBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#order_by` API is deprecated in favour of the more capable `ProjectV2View#group_by_field` API. Check out the `ProjectV2View#group_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's group-by field.
+  """
+  groupByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The Node ID of the ProjectV2View object
+  """
+  id: ID!
+
+  """
+  The project view's layout.
+  """
+  layout: ProjectV2ViewLayout!
+
+  """
+  The project view's name.
+  """
+  name: String!
+
+  """
+  The project view's number.
+  """
+  number: Int!
+
+  """
+  The project that contains this view.
+  """
+  project: ProjectV2!
+
+  """
+  The view's sort-by config.
+  """
+  sortBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2SortByConnection
+    @deprecated(
+      reason: "The `ProjectV2View#sort_by` API is deprecated in favour of the more capable `ProjectV2View#sort_by_fields` API. Check out the `ProjectV2View#sort_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's sort-by config.
+  """
+  sortByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2SortByFieldConnection
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The view's vertical-group-by field.
+  """
+  verticalGroupBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#vertical_group_by` API is deprecated in favour of the more capable `ProjectV2View#vertical_group_by_fields` API. Check out the `ProjectV2View#vertical_group_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's vertical-group-by field.
+  """
+  verticalGroupByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The view's visible fields.
+  """
+  visibleFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#visibleFields` API is deprecated in favour of the more capable `ProjectV2View#fields` API. Check out the `ProjectV2View#fields` API as an example for the more capable alternative. Removal on 2023-01-01 UTC."
+    )
+}
+
+"""
+The connection type for ProjectV2View.
+"""
+type ProjectV2ViewConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ViewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2View]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ViewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2View
+}
+
+"""
+The layout of a project v2 view.
+"""
+enum ProjectV2ViewLayout {
+  """
+  Board layout
+  """
+  BOARD_LAYOUT
+
+  """
+  Roadmap layout
+  """
+  ROADMAP_LAYOUT
+
+  """
+  Table layout
+  """
+  TABLE_LAYOUT
+}
+
+"""
+Ordering options for project v2 view connections
+"""
+input ProjectV2ViewOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 views by.
+  """
+  field: ProjectV2ViewOrderField!
+}
+
+"""
+Properties by which project v2 view connections can be ordered.
+"""
+enum ProjectV2ViewOrderField {
+  """
+  Order project v2 views by creation time
+  """
+  CREATED_AT
+
+  """
+  Order project v2 views by name
+  """
+  NAME
+
+  """
+  Order project v2 views by position
+  """
+  POSITION
+}
+
+"""
+A workflow inside a project.
+"""
+type ProjectV2Workflow implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Whether the workflow is enabled.
+  """
+  enabled: Boolean!
+
+  """
+  The Node ID of the ProjectV2Workflow object
+  """
+  id: ID!
+
+  """
+  The name of the workflow.
+  """
+  name: String!
+
+  """
+  The number of the workflow.
+  """
+  number: Int!
+
+  """
+  The project that contains this workflow.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2Workflow.
+"""
+type ProjectV2WorkflowConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2WorkflowEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Workflow]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2WorkflowEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Workflow
+}
+
+"""
+Ordering options for project v2 workflows connections
+"""
+input ProjectV2WorkflowOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 workflows by.
+  """
+  field: ProjectV2WorkflowsOrderField!
+}
+
+"""
+Properties by which project workflows can be ordered.
+"""
+enum ProjectV2WorkflowsOrderField {
+  """
+  The date and time of the workflow creation
+  """
+  CREATED_AT
+
+  """
+  The name of the workflow
+  """
+  NAME
+
+  """
+  The number of the workflow
+  """
+  NUMBER
+
+  """
+  The date and time of the workflow update
+  """
+  UPDATED_AT
+}
+
+"""
+A user's public key.
+"""
+type PublicKey implements Node {
+  """
+  The last time this authorization was used to perform an action. Values will be null for keys not owned by the user.
+  """
+  accessedAt: DateTime
+
+  """
+  Identifies the date and time when the key was created. Keys created before
+  March 5th, 2014 have inaccurate values. Values will be null for keys not owned by the user.
+  """
+  createdAt: DateTime
+
+  """
+  The fingerprint for this PublicKey.
+  """
+  fingerprint: String!
+
+  """
+  The Node ID of the PublicKey object
+  """
+  id: ID!
+
+  """
+  Whether this PublicKey is read-only or not. Values will be null for keys not owned by the user.
+  """
+  isReadOnly: Boolean
+
+  """
+  The public key string.
+  """
+  key: String!
+
+  """
+  Identifies the date and time when the key was updated. Keys created before
+  March 5th, 2014 may have inaccurate values. Values will be null for keys not
+  owned by the user.
+  """
+  updatedAt: DateTime
+}
+
+"""
+The connection type for PublicKey.
+"""
+type PublicKeyConnection {
+  """
+  A list of edges.
+  """
+  edges: [PublicKeyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PublicKey]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PublicKeyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PublicKey
+}
+
+"""
+Autogenerated input type of PublishSponsorsTier
+"""
+input PublishSponsorsTierInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the draft tier to publish.
+  """
+  tierId: ID! @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of PublishSponsorsTier
+"""
+type PublishSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was published.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+A repository pull request.
+"""
+type PullRequest implements Assignable & Closable & Comment & Labelable & Lockable & Node & ProjectV2Owner & Reactable & RepositoryNode & Subscribable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  The number of additions in this pull request.
+  """
+  additions: Int!
+
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Returns the auto-merge request object if one exists for this pull request.
+  """
+  autoMergeRequest: AutoMergeRequest
+
+  """
+  Identifies the base Ref associated with the pull request.
+  """
+  baseRef: Ref
+
+  """
+  Identifies the name of the base Ref associated with the pull request, even if the ref has been deleted.
+  """
+  baseRefName: String!
+
+  """
+  Identifies the oid of the base ref associated with the pull request, even if the ref has been deleted.
+  """
+  baseRefOid: GitObjectID!
+
+  """
+  The repository associated with this pull request's base Ref.
+  """
+  baseRepository: Repository
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Whether or not the pull request is rebaseable.
+  """
+  canBeRebased: Boolean! @preview(toggledBy: "merge-info-preview")
+
+  """
+  The number of changed files in this pull request.
+  """
+  changedFiles: Int!
+
+  """
+  The HTTP path for the checks of this pull request.
+  """
+  checksResourcePath: URI!
+
+  """
+  The HTTP URL for the checks of this pull request.
+  """
+  checksUrl: URI!
+
+  """
+  `true` if the pull request is closed
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  List of issues that were may be closed by this pull request
+  """
+  closingIssuesReferences(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection
+    """
+    orderBy: IssueOrder
+
+    """
+    Return only manually linked Issues
+    """
+    userLinkedOnly: Boolean = false
+  ): IssueConnection
+
+  """
+  A list of comments associated with the pull request.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  A list of commits present in this pull request's head branch not present in the base branch.
+  """
+  commits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestCommitConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The number of deletions in this pull request.
+  """
+  deletions: Int!
+
+  """
+  The actor who edited this pull request's body.
+  """
+  editor: Actor
+
+  """
+  Lists the files changed within this pull request.
+  """
+  files(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestChangedFileConnection
+
+  """
+  Identifies the head Ref associated with the pull request.
+  """
+  headRef: Ref
+
+  """
+  Identifies the name of the head Ref associated with the pull request, even if the ref has been deleted.
+  """
+  headRefName: String!
+
+  """
+  Identifies the oid of the head ref associated with the pull request, even if the ref has been deleted.
+  """
+  headRefOid: GitObjectID!
+
+  """
+  The repository associated with this pull request's head Ref.
+  """
+  headRepository: Repository
+
+  """
+  The owner of the repository associated with this pull request's head Ref.
+  """
+  headRepositoryOwner: RepositoryOwner
+
+  """
+  The hovercard information for this issue
+  """
+  hovercard(
+    """
+    Whether or not to include notification contexts
+    """
+    includeNotificationContexts: Boolean = true
+  ): Hovercard!
+
+  """
+  The Node ID of the PullRequest object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The head and base repositories are different.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Identifies if the pull request is a draft.
+  """
+  isDraft: Boolean!
+
+  """
+  Is this pull request read by the viewer
+  """
+  isReadByViewer: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  A list of latest reviews per user associated with the pull request.
+  """
+  latestOpinionatedReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Only return reviews from user who have write access to the repository
+    """
+    writersOnly: Boolean = false
+  ): PullRequestReviewConnection
+
+  """
+  A list of latest reviews per user associated with the pull request that are not also pending review.
+  """
+  latestReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewConnection
+
+  """
+  `true` if the pull request is locked
+  """
+  locked: Boolean!
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean!
+
+  """
+  The commit that was created when this pull request was merged.
+  """
+  mergeCommit: Commit
+
+  """
+  The merge queue entry of the pull request in the base branch's merge queue
+  """
+  mergeQueueEntry: MergeQueueEntry
+
+  """
+  Detailed information about the current pull request merge state status.
+  """
+  mergeStateStatus: MergeStateStatus! @preview(toggledBy: "merge-info-preview")
+
+  """
+  Whether or not the pull request can be merged based on the existence of merge conflicts.
+  """
+  mergeable: MergeableState!
+
+  """
+  Whether or not the pull request was merged.
+  """
+  merged: Boolean!
+
+  """
+  The date and time that the pull request was merged.
+  """
+  mergedAt: DateTime
+
+  """
+  The actor who merged the pull request.
+  """
+  mergedBy: Actor
+
+  """
+  Identifies the milestone associated with the pull request.
+  """
+  milestone: Milestone
+
+  """
+  Identifies the pull request number.
+  """
+  number: Int!
+
+  """
+  A list of Users that are participating in the Pull Request conversation.
+  """
+  participants(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The permalink to the pull request.
+  """
+  permalink: URI!
+
+  """
+  The commit that GitHub automatically generated to test if this pull request
+  could be merged. This field will not return a value if the pull request is
+  merged, or if the test merge commit is still being generated. See the
+  `mergeable` field for more details on the mergeability of the pull request.
+  """
+  potentialMergeCommit: Commit
+
+  """
+  List of project cards associated with this pull request.
+  """
+  projectCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  List of project items associated with this pull request.
+  """
+  projectItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Include archived items.
+    """
+    includeArchived: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this pull request.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP path for reverting this pull request.
+  """
+  revertResourcePath: URI!
+
+  """
+  The HTTP URL for reverting this pull request.
+  """
+  revertUrl: URI!
+
+  """
+  The current status of this pull request with respect to code review.
+  """
+  reviewDecision: PullRequestReviewDecision
+
+  """
+  A list of review requests associated with the pull request.
+  """
+  reviewRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReviewRequestConnection
+
+  """
+  The list of all review threads for this pull request.
+  """
+  reviewThreads(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewThreadConnection!
+
+  """
+  A list of reviews associated with the pull request.
+  """
+  reviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter by author of the review.
+    """
+    author: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    A list of states to filter the reviews.
+    """
+    states: [PullRequestReviewState!]
+  ): PullRequestReviewConnection
+
+  """
+  Identifies the state of the pull request.
+  """
+  state: PullRequestState!
+
+  """
+  A list of reviewer suggestions based on commit history and past review comments.
+  """
+  suggestedReviewers: [SuggestedReviewer]!
+
+  """
+  A list of events, comments, commits, etc. associated with the pull request.
+  """
+  timeline(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows filtering timeline events by a `since` timestamp.
+    """
+    since: DateTime
+  ): PullRequestTimelineConnection!
+    @deprecated(reason: "`timeline` will be removed Use PullRequest.timelineItems instead. Removal on 2020-10-01 UTC.")
+
+  """
+  A list of events, comments, commits, etc. associated with the pull request.
+  """
+  timelineItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter timeline items by type.
+    """
+    itemTypes: [PullRequestTimelineItemsItemType!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter timeline items by a `since` timestamp.
+    """
+    since: DateTime
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestTimelineItemsConnection!
+
+  """
+  Identifies the pull request title.
+  """
+  title: String!
+
+  """
+  Identifies the pull request title rendered to HTML.
+  """
+  titleHTML: HTML!
+
+  """
+  Returns a count of how many comments this pull request has received.
+  """
+  totalCommentsCount: Int
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this pull request.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Whether or not the viewer can apply suggestion.
+  """
+  viewerCanApplySuggestion: Boolean!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the viewer can restore the deleted head ref.
+  """
+  viewerCanDeleteHeadRef: Boolean!
+
+  """
+  Whether or not the viewer can disable auto-merge
+  """
+  viewerCanDisableAutoMerge: Boolean!
+
+  """
+  Can the viewer edit files within this pull request.
+  """
+  viewerCanEditFiles: Boolean!
+
+  """
+  Whether or not the viewer can enable auto-merge
+  """
+  viewerCanEnableAutoMerge: Boolean!
+
+  """
+  Indicates whether the viewer can bypass branch protections and merge the pull request immediately
+  """
+  viewerCanMergeAsAdmin: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the viewer can update the head ref of this PR, by merging or rebasing the base ref.
+  If the head ref is up to date or unable to be updated by this user, this will return false.
+  """
+  viewerCanUpdateBranch: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  The latest review given from the viewer.
+  """
+  viewerLatestReview: PullRequestReview
+
+  """
+  The person who has requested the viewer for review on this pull request.
+  """
+  viewerLatestReviewRequest: ReviewRequest
+
+  """
+  The merge body text for the viewer and method.
+  """
+  viewerMergeBodyText(
+    """
+    The merge method for the message.
+    """
+    mergeType: PullRequestMergeMethod
+  ): String!
+
+  """
+  The merge headline text for the viewer and method.
+  """
+  viewerMergeHeadlineText(
+    """
+    The merge method for the message.
+    """
+    mergeType: PullRequestMergeMethod
+  ): String!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+The possible methods for updating a pull request's head branch with the base branch.
+"""
+enum PullRequestBranchUpdateMethod {
+  """
+  Update branch via merge
+  """
+  MERGE
+
+  """
+  Update branch via rebase
+  """
+  REBASE
+}
+
+"""
+A file changed in a pull request.
+"""
+type PullRequestChangedFile {
+  """
+  The number of additions to the file.
+  """
+  additions: Int!
+
+  """
+  How the file was changed in this PullRequest
+  """
+  changeType: PatchStatus!
+
+  """
+  The number of deletions to the file.
+  """
+  deletions: Int!
+
+  """
+  The path of the file.
+  """
+  path: String!
+
+  """
+  The state of the file for the viewer.
+  """
+  viewerViewedState: FileViewedState!
+}
+
+"""
+The connection type for PullRequestChangedFile.
+"""
+type PullRequestChangedFileConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestChangedFileEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestChangedFile]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestChangedFileEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestChangedFile
+}
+
+"""
+Represents a Git commit part of a pull request.
+"""
+type PullRequestCommit implements Node & UniformResourceLocatable {
+  """
+  The Git commit object
+  """
+  commit: Commit!
+
+  """
+  The Node ID of the PullRequestCommit object
+  """
+  id: ID!
+
+  """
+  The pull request this commit belongs to
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this pull request commit
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this pull request commit
+  """
+  url: URI!
+}
+
+"""
+Represents a commit comment thread part of a pull request.
+"""
+type PullRequestCommitCommentThread implements Node & RepositoryNode {
+  """
+  The comments that exist in this thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The commit the comments were made on.
+  """
+  commit: Commit!
+
+  """
+  The Node ID of the PullRequestCommitCommentThread object
+  """
+  id: ID!
+
+  """
+  The file the comments were made on.
+  """
+  path: String
+
+  """
+  The position in the diff for the commit that the comment was made on.
+  """
+  position: Int
+
+  """
+  The pull request this commit comment thread belongs to
+  """
+  pullRequest: PullRequest!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for PullRequestCommit.
+"""
+type PullRequestCommitConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestCommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestCommit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestCommitEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestCommit
+}
+
+"""
+The connection type for PullRequest.
+"""
+type PullRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates pull requests opened by a user within one repository.
+"""
+type PullRequestContributionsByRepository {
+  """
+  The pull request contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestContributionConnection!
+
+  """
+  The repository in which the pull requests were opened.
+  """
+  repository: Repository!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequest
+}
+
+"""
+Represents available types of methods to use when merging a pull request.
+"""
+enum PullRequestMergeMethod {
+  """
+  Add all commits from the head branch to the base branch with a merge commit.
+  """
+  MERGE
+
+  """
+  Add all commits from the head branch onto the base branch individually.
+  """
+  REBASE
+
+  """
+  Combine all commits from the head branch into a single commit in the base branch.
+  """
+  SQUASH
+}
+
+"""
+Ways in which lists of issues can be ordered upon return.
+"""
+input PullRequestOrder {
+  """
+  The direction in which to order pull requests by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order pull requests by.
+  """
+  field: PullRequestOrderField!
+}
+
+"""
+Properties by which pull_requests connections can be ordered.
+"""
+enum PullRequestOrderField {
+  """
+  Order pull_requests by creation time
+  """
+  CREATED_AT
+
+  """
+  Order pull_requests by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+"""
+type PullRequestParameters {
+  """
+  New, reviewable commits pushed will dismiss previous pull request review approvals.
+  """
+  dismissStaleReviewsOnPush: Boolean!
+
+  """
+  Require an approving review in pull requests that modify files that have a designated code owner.
+  """
+  requireCodeOwnerReview: Boolean!
+
+  """
+  Whether the most recent reviewable push must be approved by someone other than the person who pushed it.
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  The number of approving reviews that are required before a pull request can be merged.
+  """
+  requiredApprovingReviewCount: Int!
+
+  """
+  All conversations on code must be resolved before a pull request can be merged.
+  """
+  requiredReviewThreadResolution: Boolean!
+}
+
+"""
+Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+"""
+input PullRequestParametersInput {
+  """
+  New, reviewable commits pushed will dismiss previous pull request review approvals.
+  """
+  dismissStaleReviewsOnPush: Boolean!
+
+  """
+  Require an approving review in pull requests that modify files that have a designated code owner.
+  """
+  requireCodeOwnerReview: Boolean!
+
+  """
+  Whether the most recent reviewable push must be approved by someone other than the person who pushed it.
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  The number of approving reviews that are required before a pull request can be merged.
+  """
+  requiredApprovingReviewCount: Int!
+
+  """
+  All conversations on code must be resolved before a pull request can be merged.
+  """
+  requiredReviewThreadResolution: Boolean!
+}
+
+"""
+A review object for a given pull request.
+"""
+type PullRequestReview implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Indicates whether the author of this review has push access to the repository.
+  """
+  authorCanPushToRepository: Boolean!
+
+  """
+  Identifies the pull request review body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body of this review rendered as plain text.
+  """
+  bodyText: String!
+
+  """
+  A list of review comments for the current pull request review.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  Identifies the commit associated with this pull request review.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the PullRequestReview object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  A list of teams that this review was made on behalf of.
+  """
+  onBehalfOf(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the pull request associated with this pull request review.
+  """
+  pullRequest: PullRequest!
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this PullRequestReview.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the current state of the pull request review.
+  """
+  state: PullRequestReviewState!
+
+  """
+  Identifies when the Pull Request Review was submitted
+  """
+  submittedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this PullRequestReview.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+A review comment associated with a given repository pull request.
+"""
+type PullRequestReviewComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The comment body of this review comment.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The comment body of this review comment rendered as plain text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the commit associated with the comment.
+  """
+  commit: Commit
+
+  """
+  Identifies when the comment was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The diff hunk to which the comment applies.
+  """
+  diffHunk: String!
+
+  """
+  Identifies when the comment was created in a draft state.
+  """
+  draftedAt: DateTime!
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the PullRequestReviewComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  The end line number on the file to which the comment applies
+  """
+  line: Int
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies the original commit associated with the comment.
+  """
+  originalCommit: Commit
+
+  """
+  The end line number on the file to which the comment applied when it was first created
+  """
+  originalLine: Int
+
+  """
+  The original line index in the diff to which the comment applies.
+  """
+  originalPosition: Int!
+    @deprecated(reason: "We are phasing out diff-relative positioning for PR comments Removal on 2023-10-01 UTC.")
+
+  """
+  The start line number on the file to which the comment applied when it was first created
+  """
+  originalStartLine: Int
+
+  """
+  Identifies when the comment body is outdated
+  """
+  outdated: Boolean!
+
+  """
+  The path to which the comment applies.
+  """
+  path: String!
+
+  """
+  The line index in the diff to which the comment applies.
+  """
+  position: Int
+    @deprecated(
+      reason: "We are phasing out diff-relative positioning for PR comments Use the `line` and `startLine` fields instead, which are file line numbers instead of diff line numbers Removal on 2023-10-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  The pull request associated with this review comment.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The pull request review associated with this review comment.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The comment this is a reply to.
+  """
+  replyTo: PullRequestReviewComment
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this review comment.
+  """
+  resourcePath: URI!
+
+  """
+  The start line number on the file to which the comment applies
+  """
+  startLine: Int
+
+  """
+  Identifies the state of the comment.
+  """
+  state: PullRequestReviewCommentState!
+
+  """
+  The level at which the comments in the corresponding thread are targeted, can be a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Identifies when the comment was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this review comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for PullRequestReviewComment.
+"""
+type PullRequestReviewCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReviewComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestReviewCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReviewComment
+}
+
+"""
+The possible states of a pull request review comment.
+"""
+enum PullRequestReviewCommentState {
+  """
+  A comment that is part of a pending review
+  """
+  PENDING
+
+  """
+  A comment that is part of a submitted review
+  """
+  SUBMITTED
+}
+
+"""
+The connection type for PullRequestReview.
+"""
+type PullRequestReviewConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReview]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates pull request reviews made by a user within one repository.
+"""
+type PullRequestReviewContributionsByRepository {
+  """
+  The pull request review contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestReviewContributionConnection!
+
+  """
+  The repository in which the pull request reviews were made.
+  """
+  repository: Repository!
+}
+
+"""
+The review status of a pull request.
+"""
+enum PullRequestReviewDecision {
+  """
+  The pull request has received an approving review.
+  """
+  APPROVED
+
+  """
+  Changes have been requested on the pull request.
+  """
+  CHANGES_REQUESTED
+
+  """
+  A review is required before the pull request can be merged.
+  """
+  REVIEW_REQUIRED
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestReviewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReview
+}
+
+"""
+The possible events to perform on a pull request review.
+"""
+enum PullRequestReviewEvent {
+  """
+  Submit feedback and approve merging these changes.
+  """
+  APPROVE
+
+  """
+  Submit general feedback without explicit approval.
+  """
+  COMMENT
+
+  """
+  Dismiss review so it now longer effects merging.
+  """
+  DISMISS
+
+  """
+  Submit feedback that must be addressed before merging.
+  """
+  REQUEST_CHANGES
+}
+
+"""
+The possible states of a pull request review.
+"""
+enum PullRequestReviewState {
+  """
+  A review allowing the pull request to merge.
+  """
+  APPROVED
+
+  """
+  A review blocking the pull request from merging.
+  """
+  CHANGES_REQUESTED
+
+  """
+  An informational review.
+  """
+  COMMENTED
+
+  """
+  A review that has been dismissed.
+  """
+  DISMISSED
+
+  """
+  A review that has not yet been submitted.
+  """
+  PENDING
+}
+
+"""
+A threaded list of comments for a given pull request.
+"""
+type PullRequestReviewThread implements Node {
+  """
+  A list of pull request comments associated with the thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  The side of the diff on which this thread was placed.
+  """
+  diffSide: DiffSide!
+
+  """
+  The Node ID of the PullRequestReviewThread object
+  """
+  id: ID!
+
+  """
+  Whether or not the thread has been collapsed (resolved)
+  """
+  isCollapsed: Boolean!
+
+  """
+  Indicates whether this thread was outdated by newer changes.
+  """
+  isOutdated: Boolean!
+
+  """
+  Whether this thread has been resolved
+  """
+  isResolved: Boolean!
+
+  """
+  The line in the file to which this thread refers
+  """
+  line: Int
+
+  """
+  The original line in the file to which this thread refers.
+  """
+  originalLine: Int
+
+  """
+  The original start line in the file to which this thread refers (multi-line only).
+  """
+  originalStartLine: Int
+
+  """
+  Identifies the file path of this thread.
+  """
+  path: String!
+
+  """
+  Identifies the pull request associated with this thread.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the repository associated with this thread.
+  """
+  repository: Repository!
+
+  """
+  The user who resolved this thread
+  """
+  resolvedBy: User
+
+  """
+  The side of the diff that the first line of the thread starts on (multi-line only)
+  """
+  startDiffSide: DiffSide
+
+  """
+  The start line in the file to which this thread refers (multi-line only)
+  """
+  startLine: Int
+
+  """
+  The level at which the comments in the corresponding thread are targeted, can be a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Indicates whether the current viewer can reply to this thread.
+  """
+  viewerCanReply: Boolean!
+
+  """
+  Whether or not the viewer can resolve this thread
+  """
+  viewerCanResolve: Boolean!
+
+  """
+  Whether or not the viewer can unresolve this thread
+  """
+  viewerCanUnresolve: Boolean!
+}
+
+"""
+Review comment threads for a pull request review.
+"""
+type PullRequestReviewThreadConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewThreadEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReviewThread]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestReviewThreadEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReviewThread
+}
+
+"""
+The possible subject types of a pull request review comment.
+"""
+enum PullRequestReviewThreadSubjectType {
+  """
+  A comment that has been made against the file of a pull request
+  """
+  FILE
+
+  """
+  A comment that has been made against the line of a pull request
+  """
+  LINE
+}
+
+"""
+Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+"""
+type PullRequestRevisionMarker {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The last commit the viewer has seen.
+  """
+  lastSeenCommit: Commit!
+
+  """
+  The pull request to which the marker belongs.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+The possible states of a pull request.
+"""
+enum PullRequestState {
+  """
+  A pull request that has been closed without being merged.
+  """
+  CLOSED
+
+  """
+  A pull request that has been closed by being merged.
+  """
+  MERGED
+
+  """
+  A pull request that is still open.
+  """
+  OPEN
+}
+
+"""
+A repository pull request template.
+"""
+type PullRequestTemplate {
+  """
+  The body of the template
+  """
+  body: String
+
+  """
+  The filename of the template
+  """
+  filename: String
+
+  """
+  The repository the template belongs to
+  """
+  repository: Repository!
+}
+
+"""
+A threaded list of comments for a given pull request.
+"""
+type PullRequestThread implements Node {
+  """
+  A list of pull request comments associated with the thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  The side of the diff on which this thread was placed.
+  """
+  diffSide: DiffSide!
+
+  """
+  The Node ID of the PullRequestThread object
+  """
+  id: ID!
+
+  """
+  Whether or not the thread has been collapsed (resolved)
+  """
+  isCollapsed: Boolean!
+
+  """
+  Indicates whether this thread was outdated by newer changes.
+  """
+  isOutdated: Boolean!
+
+  """
+  Whether this thread has been resolved
+  """
+  isResolved: Boolean!
+
+  """
+  The line in the file to which this thread refers
+  """
+  line: Int
+
+  """
+  Identifies the file path of this thread.
+  """
+  path: String!
+
+  """
+  Identifies the pull request associated with this thread.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the repository associated with this thread.
+  """
+  repository: Repository!
+
+  """
+  The user who resolved this thread
+  """
+  resolvedBy: User
+
+  """
+  The side of the diff that the first line of the thread starts on (multi-line only)
+  """
+  startDiffSide: DiffSide
+
+  """
+  The line of the first file diff in the thread.
+  """
+  startLine: Int
+
+  """
+  The level at which the comments in the corresponding thread are targeted, can be a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Indicates whether the current viewer can reply to this thread.
+  """
+  viewerCanReply: Boolean!
+
+  """
+  Whether or not the viewer can resolve this thread
+  """
+  viewerCanResolve: Boolean!
+
+  """
+  Whether or not the viewer can unresolve this thread
+  """
+  viewerCanUnresolve: Boolean!
+}
+
+"""
+The connection type for PullRequestTimelineItem.
+"""
+type PullRequestTimelineConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestTimelineItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestTimelineItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An item in a pull request timeline
+"""
+union PullRequestTimelineItem =
+    AssignedEvent
+  | BaseRefDeletedEvent
+  | BaseRefForcePushedEvent
+  | ClosedEvent
+  | Commit
+  | CommitCommentThread
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DeployedEvent
+  | DeploymentEnvironmentChangedEvent
+  | HeadRefDeletedEvent
+  | HeadRefForcePushedEvent
+  | HeadRefRestoredEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MergedEvent
+  | MilestonedEvent
+  | PullRequestReview
+  | PullRequestReviewComment
+  | PullRequestReviewThread
+  | ReferencedEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | ReviewDismissedEvent
+  | ReviewRequestRemovedEvent
+  | ReviewRequestedEvent
+  | SubscribedEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+An edge in a connection.
+"""
+type PullRequestTimelineItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestTimelineItem
+}
+
+"""
+An item in a pull request timeline
+"""
+union PullRequestTimelineItems =
+    AddedToMergeQueueEvent
+  | AddedToProjectEvent
+  | AssignedEvent
+  | AutoMergeDisabledEvent
+  | AutoMergeEnabledEvent
+  | AutoRebaseEnabledEvent
+  | AutoSquashEnabledEvent
+  | AutomaticBaseChangeFailedEvent
+  | AutomaticBaseChangeSucceededEvent
+  | BaseRefChangedEvent
+  | BaseRefDeletedEvent
+  | BaseRefForcePushedEvent
+  | ClosedEvent
+  | CommentDeletedEvent
+  | ConnectedEvent
+  | ConvertToDraftEvent
+  | ConvertedNoteToIssueEvent
+  | ConvertedToDiscussionEvent
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DeployedEvent
+  | DeploymentEnvironmentChangedEvent
+  | DisconnectedEvent
+  | HeadRefDeletedEvent
+  | HeadRefForcePushedEvent
+  | HeadRefRestoredEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MarkedAsDuplicateEvent
+  | MentionedEvent
+  | MergedEvent
+  | MilestonedEvent
+  | MovedColumnsInProjectEvent
+  | PinnedEvent
+  | PullRequestCommit
+  | PullRequestCommitCommentThread
+  | PullRequestReview
+  | PullRequestReviewThread
+  | PullRequestRevisionMarker
+  | ReadyForReviewEvent
+  | ReferencedEvent
+  | RemovedFromMergeQueueEvent
+  | RemovedFromProjectEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | ReviewDismissedEvent
+  | ReviewRequestRemovedEvent
+  | ReviewRequestedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnmarkedAsDuplicateEvent
+  | UnpinnedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+The connection type for PullRequestTimelineItems.
+"""
+type PullRequestTimelineItemsConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestTimelineItemsEdge]
+
+  """
+  Identifies the count of items after applying `before` and `after` filters.
+  """
+  filteredCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestTimelineItems]
+
+  """
+  Identifies the count of items after applying `before`/`after` filters and `first`/`last`/`skip` slicing.
+  """
+  pageCount: Int!
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the date and time when the timeline was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestTimelineItemsEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestTimelineItems
+}
+
+"""
+The possible item types found in a timeline.
+"""
+enum PullRequestTimelineItemsItemType {
+  """
+  Represents an 'added_to_merge_queue' event on a given pull request.
+  """
+  ADDED_TO_MERGE_QUEUE_EVENT
+
+  """
+  Represents a 'added_to_project' event on a given issue or pull request.
+  """
+  ADDED_TO_PROJECT_EVENT
+
+  """
+  Represents an 'assigned' event on any assignable object.
+  """
+  ASSIGNED_EVENT
+
+  """
+  Represents a 'automatic_base_change_failed' event on a given pull request.
+  """
+  AUTOMATIC_BASE_CHANGE_FAILED_EVENT
+
+  """
+  Represents a 'automatic_base_change_succeeded' event on a given pull request.
+  """
+  AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT
+
+  """
+  Represents a 'auto_merge_disabled' event on a given pull request.
+  """
+  AUTO_MERGE_DISABLED_EVENT
+
+  """
+  Represents a 'auto_merge_enabled' event on a given pull request.
+  """
+  AUTO_MERGE_ENABLED_EVENT
+
+  """
+  Represents a 'auto_rebase_enabled' event on a given pull request.
+  """
+  AUTO_REBASE_ENABLED_EVENT
+
+  """
+  Represents a 'auto_squash_enabled' event on a given pull request.
+  """
+  AUTO_SQUASH_ENABLED_EVENT
+
+  """
+  Represents a 'base_ref_changed' event on a given issue or pull request.
+  """
+  BASE_REF_CHANGED_EVENT
+
+  """
+  Represents a 'base_ref_deleted' event on a given pull request.
+  """
+  BASE_REF_DELETED_EVENT
+
+  """
+  Represents a 'base_ref_force_pushed' event on a given pull request.
+  """
+  BASE_REF_FORCE_PUSHED_EVENT
+
+  """
+  Represents a 'closed' event on any `Closable`.
+  """
+  CLOSED_EVENT
+
+  """
+  Represents a 'comment_deleted' event on a given issue or pull request.
+  """
+  COMMENT_DELETED_EVENT
+
+  """
+  Represents a 'connected' event on a given issue or pull request.
+  """
+  CONNECTED_EVENT
+
+  """
+  Represents a 'converted_note_to_issue' event on a given issue or pull request.
+  """
+  CONVERTED_NOTE_TO_ISSUE_EVENT
+
+  """
+  Represents a 'converted_to_discussion' event on a given issue.
+  """
+  CONVERTED_TO_DISCUSSION_EVENT
+
+  """
+  Represents a 'convert_to_draft' event on a given pull request.
+  """
+  CONVERT_TO_DRAFT_EVENT
+
+  """
+  Represents a mention made by one issue or pull request to another.
+  """
+  CROSS_REFERENCED_EVENT
+
+  """
+  Represents a 'demilestoned' event on a given issue or pull request.
+  """
+  DEMILESTONED_EVENT
+
+  """
+  Represents a 'deployed' event on a given pull request.
+  """
+  DEPLOYED_EVENT
+
+  """
+  Represents a 'deployment_environment_changed' event on a given pull request.
+  """
+  DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT
+
+  """
+  Represents a 'disconnected' event on a given issue or pull request.
+  """
+  DISCONNECTED_EVENT
+
+  """
+  Represents a 'head_ref_deleted' event on a given pull request.
+  """
+  HEAD_REF_DELETED_EVENT
+
+  """
+  Represents a 'head_ref_force_pushed' event on a given pull request.
+  """
+  HEAD_REF_FORCE_PUSHED_EVENT
+
+  """
+  Represents a 'head_ref_restored' event on a given pull request.
+  """
+  HEAD_REF_RESTORED_EVENT
+
+  """
+  Represents a comment on an Issue.
+  """
+  ISSUE_COMMENT
+
+  """
+  Represents a 'labeled' event on a given issue or pull request.
+  """
+  LABELED_EVENT
+
+  """
+  Represents a 'locked' event on a given issue or pull request.
+  """
+  LOCKED_EVENT
+
+  """
+  Represents a 'marked_as_duplicate' event on a given issue or pull request.
+  """
+  MARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents a 'mentioned' event on a given issue or pull request.
+  """
+  MENTIONED_EVENT
+
+  """
+  Represents a 'merged' event on a given pull request.
+  """
+  MERGED_EVENT
+
+  """
+  Represents a 'milestoned' event on a given issue or pull request.
+  """
+  MILESTONED_EVENT
+
+  """
+  Represents a 'moved_columns_in_project' event on a given issue or pull request.
+  """
+  MOVED_COLUMNS_IN_PROJECT_EVENT
+
+  """
+  Represents a 'pinned' event on a given issue or pull request.
+  """
+  PINNED_EVENT
+
+  """
+  Represents a Git commit part of a pull request.
+  """
+  PULL_REQUEST_COMMIT
+
+  """
+  Represents a commit comment thread part of a pull request.
+  """
+  PULL_REQUEST_COMMIT_COMMENT_THREAD
+
+  """
+  A review object for a given pull request.
+  """
+  PULL_REQUEST_REVIEW
+
+  """
+  A threaded list of comments for a given pull request.
+  """
+  PULL_REQUEST_REVIEW_THREAD
+
+  """
+  Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+  """
+  PULL_REQUEST_REVISION_MARKER
+
+  """
+  Represents a 'ready_for_review' event on a given pull request.
+  """
+  READY_FOR_REVIEW_EVENT
+
+  """
+  Represents a 'referenced' event on a given `ReferencedSubject`.
+  """
+  REFERENCED_EVENT
+
+  """
+  Represents a 'removed_from_merge_queue' event on a given pull request.
+  """
+  REMOVED_FROM_MERGE_QUEUE_EVENT
+
+  """
+  Represents a 'removed_from_project' event on a given issue or pull request.
+  """
+  REMOVED_FROM_PROJECT_EVENT
+
+  """
+  Represents a 'renamed' event on a given issue or pull request
+  """
+  RENAMED_TITLE_EVENT
+
+  """
+  Represents a 'reopened' event on any `Closable`.
+  """
+  REOPENED_EVENT
+
+  """
+  Represents a 'review_dismissed' event on a given issue or pull request.
+  """
+  REVIEW_DISMISSED_EVENT
+
+  """
+  Represents a 'review_requested' event on a given pull request.
+  """
+  REVIEW_REQUESTED_EVENT
+
+  """
+  Represents a 'review_request_removed' event on a given pull request.
+  """
+  REVIEW_REQUEST_REMOVED_EVENT
+
+  """
+  Represents a 'subscribed' event on a given `Subscribable`.
+  """
+  SUBSCRIBED_EVENT
+
+  """
+  Represents a 'transferred' event on a given issue or pull request.
+  """
+  TRANSFERRED_EVENT
+
+  """
+  Represents an 'unassigned' event on any assignable object.
+  """
+  UNASSIGNED_EVENT
+
+  """
+  Represents an 'unlabeled' event on a given issue or pull request.
+  """
+  UNLABELED_EVENT
+
+  """
+  Represents an 'unlocked' event on a given issue or pull request.
+  """
+  UNLOCKED_EVENT
+
+  """
+  Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+  """
+  UNMARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents an 'unpinned' event on a given issue or pull request.
+  """
+  UNPINNED_EVENT
+
+  """
+  Represents an 'unsubscribed' event on a given `Subscribable`.
+  """
+  UNSUBSCRIBED_EVENT
+
+  """
+  Represents a 'user_blocked' event on a given user.
+  """
+  USER_BLOCKED_EVENT
+}
+
+"""
+The possible target states when updating a pull request.
+"""
+enum PullRequestUpdateState {
+  """
+  A pull request that has been closed without being merged.
+  """
+  CLOSED
+
+  """
+  A pull request that is still open.
+  """
+  OPEN
+}
+
+"""
+A Git push.
+"""
+type Push implements Node {
+  """
+  The Node ID of the Push object
+  """
+  id: ID!
+
+  """
+  The SHA after the push
+  """
+  nextSha: GitObjectID
+
+  """
+  The permalink for this push.
+  """
+  permalink: URI!
+
+  """
+  The SHA before the push
+  """
+  previousSha: GitObjectID
+
+  """
+  The actor who pushed
+  """
+  pusher: Actor!
+
+  """
+  The repository that was pushed to
+  """
+  repository: Repository!
+}
+
+"""
+A team, user, or app who has the ability to push to a protected branch.
+"""
+type PushAllowance implements Node {
+  """
+  The actor that can push.
+  """
+  actor: PushAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the PushAllowance object
+  """
+  id: ID!
+}
+
+"""
+Types that can be an actor.
+"""
+union PushAllowanceActor = App | Team | User
+
+"""
+The connection type for PushAllowance.
+"""
+type PushAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [PushAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PushAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PushAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PushAllowance
+}
+
+"""
+The query root of GitHub's GraphQL interface.
+"""
+type Query {
+  """
+  Look up a code of conduct by its key
+  """
+  codeOfConduct(
+    """
+    The code of conduct's key
+    """
+    key: String!
+  ): CodeOfConduct
+
+  """
+  Look up a code of conduct by its key
+  """
+  codesOfConduct: [CodeOfConduct]
+
+  """
+  Look up an enterprise by URL slug.
+  """
+  enterprise(
+    """
+    The enterprise invitation token.
+    """
+    invitationToken: String
+
+    """
+    The enterprise URL slug.
+    """
+    slug: String!
+  ): Enterprise
+
+  """
+  Look up a pending enterprise administrator invitation by invitee, enterprise and role.
+  """
+  enterpriseAdministratorInvitation(
+    """
+    The slug of the enterprise the user was invited to join.
+    """
+    enterpriseSlug: String!
+
+    """
+    The role for the business member invitation.
+    """
+    role: EnterpriseAdministratorRole!
+
+    """
+    The login of the user invited to join the business.
+    """
+    userLogin: String!
+  ): EnterpriseAdministratorInvitation
+
+  """
+  Look up a pending enterprise administrator invitation by invitation token.
+  """
+  enterpriseAdministratorInvitationByToken(
+    """
+    The invitation token sent with the invitation email.
+    """
+    invitationToken: String!
+  ): EnterpriseAdministratorInvitation
+
+  """
+  Look up an open source license by its key
+  """
+  license(
+    """
+    The license's downcased SPDX ID
+    """
+    key: String!
+  ): License
+
+  """
+  Return a list of known open source licenses
+  """
+  licenses: [License]!
+
+  """
+  Get alphabetically sorted list of Marketplace categories
+  """
+  marketplaceCategories(
+    """
+    Exclude categories with no listings.
+    """
+    excludeEmpty: Boolean
+
+    """
+    Returns top level categories only, excluding any subcategories.
+    """
+    excludeSubcategories: Boolean
+
+    """
+    Return only the specified categories.
+    """
+    includeCategories: [String!]
+  ): [MarketplaceCategory!]!
+
+  """
+  Look up a Marketplace category by its slug.
+  """
+  marketplaceCategory(
+    """
+    The URL slug of the category.
+    """
+    slug: String!
+
+    """
+    Also check topic aliases for the category slug
+    """
+    useTopicAliases: Boolean
+  ): MarketplaceCategory
+
+  """
+  Look up a single Marketplace listing
+  """
+  marketplaceListing(
+    """
+    Select the listing that matches this slug. It's the short name of the listing used in its URL.
+    """
+    slug: String!
+  ): MarketplaceListing
+
+  """
+  Look up Marketplace listings
+  """
+  marketplaceListings(
+    """
+    Select listings that can be administered by the specified user.
+    """
+    adminId: ID
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Select listings visible to the viewer even if they are not approved. If omitted or
+    false, only approved listings will be returned.
+    """
+    allStates: Boolean
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Select only listings with the given category.
+    """
+    categorySlug: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Select listings for products owned by the specified organization.
+    """
+    organizationId: ID
+
+    """
+    Select only listings where the primary category matches the given category slug.
+    """
+    primaryCategoryOnly: Boolean = false
+
+    """
+    Select the listings with these slugs, if they are visible to the viewer.
+    """
+    slugs: [String]
+
+    """
+    Also check topic aliases for the category slug
+    """
+    useTopicAliases: Boolean
+
+    """
+    Select listings to which user has admin access. If omitted, listings visible to the
+    viewer are returned.
+    """
+    viewerCanAdmin: Boolean
+
+    """
+    Select only listings that offer a free trial.
+    """
+    withFreeTrialsOnly: Boolean = false
+  ): MarketplaceListingConnection!
+
+  """
+  Return information about the GitHub instance
+  """
+  meta: GitHubMetadata!
+
+  """
+  Fetches an object given its ID.
+  """
+  node(
+    """
+    ID of the object.
+    """
+    id: ID!
+  ): Node
+
+  """
+  Lookup nodes by a list of IDs.
+  """
+  nodes(
+    """
+    The list of node IDs.
+    """
+    ids: [ID!]!
+  ): [Node]!
+
+  """
+  Lookup an organization by login.
+  """
+  organization(
+    """
+    The organization's login.
+    """
+    login: String!
+  ): Organization
+
+  """
+  The client's rate limit information.
+  """
+  rateLimit(
+    """
+    If true, calculate the cost for the query without evaluating it
+    """
+    dryRun: Boolean = false
+  ): RateLimit
+
+  """
+  Workaround for re-exposing the root query object. (Refer to
+  https://github.com/facebook/relay/issues/112 for more information.)
+  """
+  relay: Query!
+
+  """
+  Lookup a given repository by the owner and repository name.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    The name of the repository
+    """
+    name: String!
+
+    """
+    The login field of a user or organization
+    """
+    owner: String!
+  ): Repository
+
+  """
+  Lookup a repository owner (ie. either a User or an Organization) by login.
+  """
+  repositoryOwner(
+    """
+    The username to lookup the owner by.
+    """
+    login: String!
+  ): RepositoryOwner
+
+  """
+  Lookup resource by a URL.
+  """
+  resource(
+    """
+    The URL.
+    """
+    url: URI!
+  ): UniformResourceLocatable
+
+  """
+  Perform a search across resources, returning a maximum of 1,000 results.
+  """
+  search(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The search string to look for.
+    """
+    query: String!
+
+    """
+    The types of search items to search within.
+    """
+    type: SearchType!
+  ): SearchResultItemConnection!
+
+  """
+  GitHub Security Advisories
+  """
+  securityAdvisories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of classifications to filter advisories by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter advisories by identifier, e.g. GHSA or CVE.
+    """
+    identifier: SecurityAdvisoryIdentifierFilter
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned topics.
+    """
+    orderBy: SecurityAdvisoryOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    Filter advisories to those published since a time in the past.
+    """
+    publishedSince: DateTime
+
+    """
+    Filter advisories to those updated since a time in the past.
+    """
+    updatedSince: DateTime
+  ): SecurityAdvisoryConnection!
+
+  """
+  Fetch a Security Advisory by its GHSA ID
+  """
+  securityAdvisory(
+    """
+    GitHub Security Advisory ID.
+    """
+    ghsaId: String!
+  ): SecurityAdvisory
+
+  """
+  Software Vulnerabilities documented by GitHub Security Advisories
+  """
+  securityVulnerabilities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of advisory classifications to filter vulnerabilities by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    An ecosystem to filter vulnerabilities by.
+    """
+    ecosystem: SecurityAdvisoryEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned topics.
+    """
+    orderBy: SecurityVulnerabilityOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A package name to filter vulnerabilities by.
+    """
+    package: String
+
+    """
+    A list of severities to filter vulnerabilities by.
+    """
+    severities: [SecurityAdvisorySeverity!]
+  ): SecurityVulnerabilityConnection!
+
+  """
+  Users and organizations who can be sponsored via GitHub Sponsors.
+  """
+  sponsorables(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Optional filter for which dependencies should be checked for sponsorable
+    owners. Only sponsorable owners of dependencies in this ecosystem will be
+    included. Used when onlyDependencies = true.
+
+    **Upcoming Change on 2022-07-01 UTC**
+    **Description:** `dependencyEcosystem` will be removed. Use the ecosystem argument instead.
+    **Reason:** The type is switching from SecurityAdvisoryEcosystem to DependencyGraphEcosystem.
+    """
+    dependencyEcosystem: SecurityAdvisoryEcosystem
+
+    """
+    Optional filter for which dependencies should be checked for sponsorable
+    owners. Only sponsorable owners of dependencies in this ecosystem will be
+    included. Used when onlyDependencies = true.
+    """
+    ecosystem: DependencyGraphEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Whether only sponsorables who own the viewer's dependencies will be
+    returned. Must be authenticated to use. Can check an organization instead
+    for their dependencies owned by sponsorables by passing
+    orgLoginForDependencies.
+    """
+    onlyDependencies: Boolean = false
+
+    """
+    Ordering options for users and organizations returned from the connection.
+    """
+    orderBy: SponsorableOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Optional organization username for whose dependencies should be checked.
+    Used when onlyDependencies = true. Omit to check your own dependencies. If
+    you are not an administrator of the organization, only dependencies from its
+    public repositories will be considered.
+    """
+    orgLoginForDependencies: String
+  ): SponsorableItemConnection!
+
+  """
+  Look up a topic by name.
+  """
+  topic(
+    """
+    The topic's name.
+    """
+    name: String!
+  ): Topic
+
+  """
+  Lookup a user by login.
+  """
+  user(
+    """
+    The user's login.
+    """
+    login: String!
+  ): User
+
+  """
+  The currently authenticated user.
+  """
+  viewer: User!
+}
+
+"""
+Represents the client's rate limit.
+"""
+type RateLimit {
+  """
+  The point cost for the current query counting against the rate limit.
+  """
+  cost: Int!
+
+  """
+  The maximum number of points the client is permitted to consume in a 60 minute window.
+  """
+  limit: Int!
+
+  """
+  The maximum number of nodes this query may return
+  """
+  nodeCount: Int!
+
+  """
+  The number of points remaining in the current rate limit window.
+  """
+  remaining: Int!
+
+  """
+  The time at which the current rate limit window resets in UTC epoch seconds.
+  """
+  resetAt: DateTime!
+
+  """
+  The number of points used in the current rate limit window.
+  """
+  used: Int!
+}
+
+"""
+Represents a subject that can be reacted on.
+"""
+interface Reactable {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Reactable object
+  """
+  id: ID!
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+}
+
+"""
+The connection type for User.
+"""
+type ReactingUserConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactingUserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user that's made a reaction.
+"""
+type ReactingUserEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: User!
+
+  """
+  The moment when the user made the reaction.
+  """
+  reactedAt: DateTime!
+}
+
+"""
+An emoji reaction to a particular piece of content.
+"""
+type Reaction implements Node {
+  """
+  Identifies the emoji reaction.
+  """
+  content: ReactionContent!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Reaction object
+  """
+  id: ID!
+
+  """
+  The reactable piece of content
+  """
+  reactable: Reactable!
+
+  """
+  Identifies the user who created this reaction.
+  """
+  user: User
+}
+
+"""
+A list of reactions that have been left on the subject.
+"""
+type ReactionConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Reaction]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Whether or not the authenticated user has left a reaction on the subject.
+  """
+  viewerHasReacted: Boolean!
+}
+
+"""
+Emojis that can be attached to Issues, Pull Requests and Comments.
+"""
+enum ReactionContent {
+  """
+  Represents the `:confused:` emoji.
+  """
+  CONFUSED
+
+  """
+  Represents the `:eyes:` emoji.
+  """
+  EYES
+
+  """
+  Represents the `:heart:` emoji.
+  """
+  HEART
+
+  """
+  Represents the `:hooray:` emoji.
+  """
+  HOORAY
+
+  """
+  Represents the `:laugh:` emoji.
+  """
+  LAUGH
+
+  """
+  Represents the `:rocket:` emoji.
+  """
+  ROCKET
+
+  """
+  Represents the `:-1:` emoji.
+  """
+  THUMBS_DOWN
+
+  """
+  Represents the `:+1:` emoji.
+  """
+  THUMBS_UP
+}
+
+"""
+An edge in a connection.
+"""
+type ReactionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Reaction
+}
+
+"""
+A group of emoji reactions to a particular piece of content.
+"""
+type ReactionGroup {
+  """
+  Identifies the emoji reaction.
+  """
+  content: ReactionContent!
+
+  """
+  Identifies when the reaction was created.
+  """
+  createdAt: DateTime
+
+  """
+  Reactors to the reaction subject with the emotion represented by this reaction group.
+  """
+  reactors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReactorConnection!
+
+  """
+  The subject that was reacted to.
+  """
+  subject: Reactable!
+
+  """
+  Users who have reacted to the reaction subject with the emotion represented by this reaction group
+  """
+  users(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReactingUserConnection!
+    @deprecated(
+      reason: "Reactors can now be mannequins, bots, and organizations. Use the `reactors` field instead. Removal on 2021-10-01 UTC."
+    )
+
+  """
+  Whether or not the authenticated user has left a reaction on the subject.
+  """
+  viewerHasReacted: Boolean!
+}
+
+"""
+Ways in which lists of reactions can be ordered upon return.
+"""
+input ReactionOrder {
+  """
+  The direction in which to order reactions by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order reactions by.
+  """
+  field: ReactionOrderField!
+}
+
+"""
+A list of fields that reactions can be ordered by.
+"""
+enum ReactionOrderField {
+  """
+  Allows ordering a list of reactions by when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Types that can be assigned to reactions.
+"""
+union Reactor = Bot | Mannequin | Organization | User
+
+"""
+The connection type for Reactor.
+"""
+type ReactorConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Reactor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents an author of a reaction.
+"""
+type ReactorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The author of the reaction.
+  """
+  node: Reactor!
+
+  """
+  The moment when the user made the reaction.
+  """
+  reactedAt: DateTime!
+}
+
+"""
+Represents a 'ready_for_review' event on a given pull request.
+"""
+type ReadyForReviewEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReadyForReviewEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this ready for review event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this ready for review event.
+  """
+  url: URI!
+}
+
+"""
+Represents a Git reference.
+"""
+type Ref implements Node {
+  """
+  A list of pull requests with this ref as the head ref.
+  """
+  associatedPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Branch protection rules for this ref
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  Compares the current ref as a base ref to another head ref, if the comparison can be made.
+  """
+  compare(
+    """
+    The head ref to compare against.
+    """
+    headRef: String!
+  ): Comparison
+
+  """
+  The Node ID of the Ref object
+  """
+  id: ID!
+
+  """
+  The ref name.
+  """
+  name: String!
+
+  """
+  The ref's prefix, such as `refs/heads/` or `refs/tags/`.
+  """
+  prefix: String!
+
+  """
+  Branch protection rules that are viewable by non-admins
+  """
+  refUpdateRule: RefUpdateRule
+
+  """
+  The repository the ref belongs to.
+  """
+  repository: Repository!
+
+  """
+  The object the ref points to. Returns null when object does not exist.
+  """
+  target: GitObject
+}
+
+"""
+The connection type for Ref.
+"""
+type RefConnection {
+  """
+  A list of edges.
+  """
+  edges: [RefEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Ref]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RefEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Ref
+}
+
+"""
+Parameters to be used for the ref_name condition
+"""
+type RefNameConditionTarget {
+  """
+  Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of ref names or patterns to include. One of these patterns must match
+  for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the
+  default branch or `~ALL` to include all branches.
+  """
+  include: [String!]!
+}
+
+"""
+Parameters to be used for the ref_name condition
+"""
+input RefNameConditionTargetInput {
+  """
+  Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of ref names or patterns to include. One of these patterns must match
+  for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the
+  default branch or `~ALL` to include all branches.
+  """
+  include: [String!]!
+}
+
+"""
+Ways in which lists of git refs can be ordered upon return.
+"""
+input RefOrder {
+  """
+  The direction in which to order refs by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order refs by.
+  """
+  field: RefOrderField!
+}
+
+"""
+Properties by which ref connections can be ordered.
+"""
+enum RefOrderField {
+  """
+  Order refs by their alphanumeric name
+  """
+  ALPHABETICAL
+
+  """
+  Order refs by underlying commit date if the ref prefix is refs/tags/
+  """
+  TAG_COMMIT_DATE
+}
+
+"""
+A ref update
+"""
+input RefUpdate @preview(toggledBy: "update-refs-preview") {
+  """
+  The value this ref should be updated to.
+  """
+  afterOid: GitObjectID!
+
+  """
+  The value this ref needs to point to before the update.
+  """
+  beforeOid: GitObjectID
+
+  """
+  Force a non fast-forward update.
+  """
+  force: Boolean = false
+
+  """
+  The fully qualified name of the ref to be updated. For example `refs/heads/branch-name`
+  """
+  name: GitRefname!
+}
+
+"""
+A ref update rules for a viewer.
+"""
+type RefUpdateRule {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean!
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean!
+
+  """
+  Can matching branches be created.
+  """
+  blocksCreations: Boolean!
+
+  """
+  Identifies the protection rule pattern.
+  """
+  pattern: String!
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String]
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean!
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean!
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean!
+
+  """
+  Are commits required to be signed.
+  """
+  requiresSignatures: Boolean!
+
+  """
+  Is the viewer allowed to dismiss reviews.
+  """
+  viewerAllowedToDismissReviews: Boolean!
+
+  """
+  Can the viewer push to the branch
+  """
+  viewerCanPush: Boolean!
+}
+
+"""
+Represents a 'referenced' event on a given `ReferencedSubject`.
+"""
+type ReferencedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the commit associated with the 'referenced' event.
+  """
+  commit: Commit
+
+  """
+  Identifies the repository associated with the 'referenced' event.
+  """
+  commitRepository: Repository!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReferencedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Checks if the commit message itself references the subject. Can be false in the case of a commit comment reference.
+  """
+  isDirectReference: Boolean!
+
+  """
+  Object referenced by event.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+Any referenceable object
+"""
+union ReferencedSubject = Issue | PullRequest
+
+"""
+Autogenerated input type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+"""
+input RegenerateEnterpriseIdentityProviderRecoveryCodesInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set an identity provider.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+}
+
+"""
+Autogenerated return type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+"""
+type RegenerateEnterpriseIdentityProviderRecoveryCodesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider for the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of RegenerateVerifiableDomainToken
+"""
+input RegenerateVerifiableDomainTokenInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to regenerate the verification token of.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of RegenerateVerifiableDomainToken
+"""
+type RegenerateVerifiableDomainTokenPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verification token that was generated.
+  """
+  verificationToken: String
+}
+
+"""
+Autogenerated input type of RejectDeployments
+"""
+input RejectDeploymentsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Optional comment for rejecting deployments
+  """
+  comment: String = ""
+
+  """
+  The ids of environments to reject deployments
+  """
+  environmentIds: [ID!]!
+
+  """
+  The node ID of the workflow run containing the pending deployments.
+  """
+  workflowRunId: ID! @possibleTypes(concreteTypes: ["WorkflowRun"])
+}
+
+"""
+Autogenerated return type of RejectDeployments
+"""
+type RejectDeploymentsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The affected deployments.
+  """
+  deployments: [Deployment!]
+}
+
+"""
+A release contains the content for a release.
+"""
+type Release implements Node & Reactable & UniformResourceLocatable {
+  """
+  The author of the release
+  """
+  author: User
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the release.
+  """
+  description: String
+
+  """
+  The description of this release rendered to HTML.
+  """
+  descriptionHTML: HTML
+
+  """
+  The Node ID of the Release object
+  """
+  id: ID!
+
+  """
+  Whether or not the release is a draft
+  """
+  isDraft: Boolean!
+
+  """
+  Whether or not the release is the latest release
+  """
+  isLatest: Boolean!
+
+  """
+  Whether or not the release is a prerelease
+  """
+  isPrerelease: Boolean!
+
+  """
+  A list of users mentioned in the release description
+  """
+  mentions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection
+
+  """
+  The title of the release.
+  """
+  name: String
+
+  """
+  Identifies the date and time when the release was created.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  List of releases assets which are dependent on this release.
+  """
+  releaseAssets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    A list of names to filter the assets by.
+    """
+    name: String
+  ): ReleaseAssetConnection!
+
+  """
+  The repository that the release belongs to.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue
+  """
+  resourcePath: URI!
+
+  """
+  A description of the release, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML
+
+  """
+  The Git tag the release points to
+  """
+  tag: Ref
+
+  """
+  The tag commit for this release.
+  """
+  tagCommit: Commit
+
+  """
+  The name of the release's Git tag
+  """
+  tagName: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue
+  """
+  url: URI!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+}
+
+"""
+A release asset contains the content for a release asset.
+"""
+type ReleaseAsset implements Node {
+  """
+  The asset's content-type
+  """
+  contentType: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The number of times this asset was downloaded
+  """
+  downloadCount: Int!
+
+  """
+  Identifies the URL where you can download the release asset via the browser.
+  """
+  downloadUrl: URI!
+
+  """
+  The Node ID of the ReleaseAsset object
+  """
+  id: ID!
+
+  """
+  Identifies the title of the release asset.
+  """
+  name: String!
+
+  """
+  Release that the asset is associated with
+  """
+  release: Release
+
+  """
+  The size (in bytes) of the asset
+  """
+  size: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user that performed the upload
+  """
+  uploadedBy: User!
+
+  """
+  Identifies the URL of the release asset.
+  """
+  url: URI!
+}
+
+"""
+The connection type for ReleaseAsset.
+"""
+type ReleaseAssetConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReleaseAssetEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReleaseAsset]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReleaseAssetEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReleaseAsset
+}
+
+"""
+The connection type for Release.
+"""
+type ReleaseConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReleaseEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Release]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReleaseEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Release
+}
+
+"""
+Ways in which lists of releases can be ordered upon return.
+"""
+input ReleaseOrder {
+  """
+  The direction in which to order releases by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order releases by.
+  """
+  field: ReleaseOrderField!
+}
+
+"""
+Properties by which release connections can be ordered.
+"""
+enum ReleaseOrderField {
+  """
+  Order releases by creation time
+  """
+  CREATED_AT
+
+  """
+  Order releases alphabetically by name
+  """
+  NAME
+}
+
+"""
+Autogenerated input type of RemoveAssigneesFromAssignable
+"""
+input RemoveAssigneesFromAssignableInput {
+  """
+  The id of the assignable object to remove assignees from.
+  """
+  assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
+
+  """
+  The id of users to remove as assignees.
+  """
+  assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of RemoveAssigneesFromAssignable
+"""
+type RemoveAssigneesFromAssignablePayload {
+  """
+  The item that was unassigned.
+  """
+  assignable: Assignable
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseAdmin
+"""
+input RemoveEnterpriseAdminInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Enterprise ID from which to remove the administrator.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user to remove as an administrator.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseAdmin
+"""
+type RemoveEnterpriseAdminPayload {
+  """
+  The user who was removed as an administrator.
+  """
+  admin: User
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of removing an administrator.
+  """
+  message: String
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseIdentityProvider
+"""
+input RemoveEnterpriseIdentityProviderInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which to remove the identity provider.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseIdentityProvider
+"""
+type RemoveEnterpriseIdentityProviderPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider that was removed from the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseMember
+"""
+input RemoveEnterpriseMemberInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which the user should be removed.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the user to remove from the enterprise.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseMember
+"""
+type RemoveEnterpriseMemberPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  The user that was removed from the enterprise.
+  """
+  user: User
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseOrganization
+"""
+input RemoveEnterpriseOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which the organization should be removed.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization to remove from the enterprise.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseOrganization
+"""
+type RemoveEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  The organization that was removed from the enterprise.
+  """
+  organization: Organization
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseSupportEntitlement
+"""
+input RemoveEnterpriseSupportEntitlementInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a member who will lose the support entitlement.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseSupportEntitlement
+"""
+type RemoveEnterpriseSupportEntitlementPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of removing the support entitlement.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of RemoveLabelsFromLabelable
+"""
+input RemoveLabelsFromLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ids of labels to remove.
+  """
+  labelIds: [ID!]! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The id of the Labelable to remove labels from.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of RemoveLabelsFromLabelable
+"""
+type RemoveLabelsFromLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Labelable the labels were removed from.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of RemoveOutsideCollaborator
+"""
+input RemoveOutsideCollaboratorInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization to remove the outside collaborator from.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The ID of the outside collaborator to remove.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RemoveOutsideCollaborator
+"""
+type RemoveOutsideCollaboratorPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was removed as an outside collaborator.
+  """
+  removedUser: User
+}
+
+"""
+Autogenerated input type of RemoveReaction
+"""
+input RemoveReactionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the emoji reaction to remove.
+  """
+  content: ReactionContent!
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "Discussion"
+        "DiscussionComment"
+        "Issue"
+        "IssueComment"
+        "PullRequest"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+        "Release"
+        "TeamDiscussion"
+        "TeamDiscussionComment"
+      ]
+      abstractType: "Reactable"
+    )
+}
+
+"""
+Autogenerated return type of RemoveReaction
+"""
+type RemoveReactionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reaction object.
+  """
+  reaction: Reaction
+
+  """
+  The reaction groups for the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  The reactable subject.
+  """
+  subject: Reactable
+}
+
+"""
+Autogenerated input type of RemoveStar
+"""
+input RemoveStarInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Starrable ID to unstar.
+  """
+  starrableId: ID! @possibleTypes(concreteTypes: ["Gist", "Repository", "Topic"], abstractType: "Starrable")
+}
+
+"""
+Autogenerated return type of RemoveStar
+"""
+type RemoveStarPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The starrable.
+  """
+  starrable: Starrable
+}
+
+"""
+Autogenerated input type of RemoveUpvote
+"""
+input RemoveUpvoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion or comment to remove upvote.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Discussion", "DiscussionComment"], abstractType: "Votable")
+}
+
+"""
+Autogenerated return type of RemoveUpvote
+"""
+type RemoveUpvotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The votable subject.
+  """
+  subject: Votable
+}
+
+"""
+Represents a 'removed_from_merge_queue' event on a given pull request.
+"""
+type RemovedFromMergeQueueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the before commit SHA for the 'removed_from_merge_queue' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who removed this Pull Request from the merge queue
+  """
+  enqueuer: User
+
+  """
+  The Node ID of the RemovedFromMergeQueueEvent object
+  """
+  id: ID!
+
+  """
+  The merge queue where this pull request was removed from.
+  """
+  mergeQueue: MergeQueue
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+
+  """
+  The reason this pull request was removed from the queue.
+  """
+  reason: String
+}
+
+"""
+Represents a 'removed_from_project' event on a given issue or pull request.
+"""
+type RemovedFromProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the RemovedFromProjectEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+Represents a 'renamed' event on a given issue or pull request
+"""
+type RenamedTitleEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the current title of the issue or pull request.
+  """
+  currentTitle: String!
+
+  """
+  The Node ID of the RenamedTitleEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the previous title of the issue or pull request.
+  """
+  previousTitle: String!
+
+  """
+  Subject that was renamed.
+  """
+  subject: RenamedTitleSubject!
+}
+
+"""
+An object which has a renamable title
+"""
+union RenamedTitleSubject = Issue | PullRequest
+
+"""
+Autogenerated input type of ReopenDiscussion
+"""
+input ReopenDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the discussion to be reopened.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+}
+
+"""
+Autogenerated return type of ReopenDiscussion
+"""
+type ReopenDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was reopened.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of ReopenIssue
+"""
+input ReopenIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to be opened.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of ReopenIssue
+"""
+type ReopenIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was opened.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of ReopenPullRequest
+"""
+input ReopenPullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be reopened.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ReopenPullRequest
+"""
+type ReopenPullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was reopened.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'reopened' event on any `Closable`.
+"""
+type ReopenedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Object that was reopened.
+  """
+  closable: Closable!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReopenedEvent object
+  """
+  id: ID!
+
+  """
+  The reason the issue state was changed to open.
+  """
+  stateReason: IssueStateReason
+}
+
+"""
+Audit log entry for a repo.access event.
+"""
+type RepoAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoAccessAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoAccessAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.add_member event.
+"""
+type RepoAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoAddMemberAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoAddMemberAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.add_topic event.
+"""
+type RepoAddTopicAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TopicAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAddTopicAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The name of the topic added to the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic added to the repository
+  """
+  topicName: String
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.archived event.
+"""
+type RepoArchivedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoArchivedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoArchivedAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoArchivedAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.change_merge_setting event.
+"""
+type RepoChangeMergeSettingAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoChangeMergeSettingAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the change was to enable (true) or disable (false) the merge type
+  """
+  isEnabled: Boolean
+
+  """
+  The merge method affected by the change
+  """
+  mergeType: RepoChangeMergeSettingAuditEntryMergeType
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The merge options available for pull requests to this repository.
+"""
+enum RepoChangeMergeSettingAuditEntryMergeType {
+  """
+  The pull request is added to the base branch in a merge commit.
+  """
+  MERGE
+
+  """
+  Commits from the pull request are added onto the base branch individually without a merge commit.
+  """
+  REBASE
+
+  """
+  The pull request's commits are squashed into a single commit before they are merged to the base branch.
+  """
+  SQUASH
+}
+
+"""
+Audit log entry for a repo.config.disable_anonymous_git_access event.
+"""
+type RepoConfigDisableAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_collaborators_only event.
+"""
+type RepoConfigDisableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_contributors_only event.
+"""
+type RepoConfigDisableContributorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableContributorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_sockpuppet_disallowed event.
+"""
+type RepoConfigDisableSockpuppetDisallowedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableSockpuppetDisallowedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_anonymous_git_access event.
+"""
+type RepoConfigEnableAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_collaborators_only event.
+"""
+type RepoConfigEnableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_contributors_only event.
+"""
+type RepoConfigEnableContributorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableContributorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_sockpuppet_disallowed event.
+"""
+type RepoConfigEnableSockpuppetDisallowedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableSockpuppetDisallowedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.lock_anonymous_git_access event.
+"""
+type RepoConfigLockAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigLockAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.unlock_anonymous_git_access event.
+"""
+type RepoConfigUnlockAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigUnlockAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.create event.
+"""
+type RepoCreateAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The name of the parent repository for this forked repository.
+  """
+  forkParentName: String
+
+  """
+  The name of the root repository for this network.
+  """
+  forkSourceName: String
+
+  """
+  The Node ID of the RepoCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoCreateAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoCreateAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.destroy event.
+"""
+type RepoDestroyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoDestroyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoDestroyAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoDestroyAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.remove_member event.
+"""
+type RepoRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoRemoveMemberAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoRemoveMemberAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.remove_topic event.
+"""
+type RepoRemoveTopicAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TopicAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoRemoveTopicAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The name of the topic added to the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic added to the repository
+  """
+  topicName: String
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The reasons a piece of content can be reported or minimized.
+"""
+enum ReportedContentClassifiers {
+  """
+  An abusive or harassing piece of content
+  """
+  ABUSE
+
+  """
+  A duplicated piece of content
+  """
+  DUPLICATE
+
+  """
+  An irrelevant piece of content
+  """
+  OFF_TOPIC
+
+  """
+  An outdated piece of content
+  """
+  OUTDATED
+
+  """
+  The content has been resolved
+  """
+  RESOLVED
+
+  """
+  A spammy piece of content
+  """
+  SPAM
+}
+
+"""
+A repository contains the content for a project.
+"""
+type Repository implements Node & PackageOwner & ProjectOwner & ProjectV2Recent & RepositoryInfo & Starrable & Subscribable & UniformResourceLocatable {
+  """
+  Whether or not a pull request head branch that is behind its base branch can
+  always be updated even if it is not required to be up to date before merging.
+  """
+  allowUpdateBranch: Boolean!
+
+  """
+  Identifies the date and time when the repository was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  A list of users that can be assigned to issues in this repository.
+  """
+  assignableUsers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters users with query on user name and login.
+    """
+    query: String
+  ): UserConnection!
+
+  """
+  Whether or not Auto-merge can be enabled on pull requests in this repository.
+  """
+  autoMergeAllowed: Boolean!
+
+  """
+  A list of branch protection rules for this repository.
+  """
+  branchProtectionRules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BranchProtectionRuleConnection!
+
+  """
+  Returns the code of conduct for this repository
+  """
+  codeOfConduct: CodeOfConduct
+
+  """
+  Information extracted from the repository's `CODEOWNERS` file.
+  """
+  codeowners(
+    """
+    The ref name used to return the associated `CODEOWNERS` file.
+    """
+    refName: String
+  ): RepositoryCodeowners
+
+  """
+  A list of collaborators associated with the repository.
+  """
+  collaborators(
+    """
+    Collaborators affiliation level with a repository.
+    """
+    affiliation: CollaboratorAffiliation
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The login of one specific collaborator.
+    """
+    login: String
+
+    """
+    Filters users with query on user name and login
+    """
+    query: String
+  ): RepositoryCollaboratorConnection
+
+  """
+  A list of commit comments associated with the repository.
+  """
+  commitComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  Returns a list of contact links associated to the repository
+  """
+  contactLinks: [RepositoryContactLink!]
+
+  """
+  Returns the contributing guidelines for this repository.
+  """
+  contributingGuidelines: ContributingGuidelines
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Ref associated with the repository's default branch.
+  """
+  defaultBranchRef: Ref
+
+  """
+  Whether or not branches are automatically deleted when merged in this repository.
+  """
+  deleteBranchOnMerge: Boolean!
+
+  """
+  A list of dependency manifests contained in the repository
+  """
+  dependencyGraphManifests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Cursor to paginate dependencies
+    """
+    dependenciesAfter: String
+
+    """
+    Number of dependencies to fetch
+    """
+    dependenciesFirst: Int
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Flag to scope to only manifests with dependencies
+    """
+    withDependencies: Boolean
+  ): DependencyGraphManifestConnection @preview(toggledBy: "hawkgirl-preview")
+
+  """
+  A list of deploy keys that are on this repository.
+  """
+  deployKeys(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeployKeyConnection!
+
+  """
+  Deployments associated with the repository
+  """
+  deployments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Environments to list deployments for
+    """
+    environments: [String!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for deployments returned from the connection.
+    """
+    orderBy: DeploymentOrder = {field: CREATED_AT, direction: ASC}
+  ): DeploymentConnection!
+
+  """
+  The description of the repository.
+  """
+  description: String
+
+  """
+  The description of the repository rendered to HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  Returns a single discussion from the current repository by number.
+  """
+  discussion(
+    """
+    The number for the discussion to be returned.
+    """
+    number: Int!
+  ): Discussion
+
+  """
+  A list of discussion categories that are available in the repository.
+  """
+  discussionCategories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filter by categories that are assignable by the viewer.
+    """
+    filterByAssignable: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCategoryConnection!
+
+  """
+  A discussion category by slug.
+  """
+  discussionCategory(
+    """
+    The slug of the discussion category to be returned.
+    """
+    slug: String!
+  ): DiscussionCategory
+
+  """
+  A list of discussions that have been opened in the repository.
+  """
+  discussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Only show answered or unanswered discussions
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Only include discussions that belong to the category with this ID.
+    """
+    categoryId: ID = null
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  The number of kilobytes this repository occupies on disk.
+  """
+  diskUsage: Int
+
+  """
+  Returns a single active environment from the current repository by name.
+  """
+  environment(
+    """
+    The name of the environment to be returned.
+    """
+    name: String!
+  ): Environment
+
+  """
+  A list of environments that are in this repository.
+  """
+  environments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the environments
+    """
+    orderBy: Environments = {field: NAME, direction: ASC}
+  ): EnvironmentConnection!
+
+  """
+  Returns how many forks there are of this repository in the whole network.
+  """
+  forkCount: Int!
+
+  """
+  Whether this repository allows forks.
+  """
+  forkingAllowed: Boolean!
+
+  """
+  A list of direct forked repositories.
+  """
+  forks(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  The funding links for this repository
+  """
+  fundingLinks: [FundingLink!]!
+
+  """
+  Indicates if the repository has the Discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean!
+
+  """
+  Indicates if the repository has issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean!
+
+  """
+  Indicates if the repository has the Projects feature enabled.
+  """
+  hasProjectsEnabled: Boolean!
+
+  """
+  Whether vulnerability alerts are enabled for the repository.
+  """
+  hasVulnerabilityAlertsEnabled: Boolean!
+
+  """
+  Indicates if the repository has wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean!
+
+  """
+  The repository's URL.
+  """
+  homepageUrl: URI
+
+  """
+  The Node ID of the Repository object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this repository.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  Indicates if the repository is unmaintained.
+  """
+  isArchived: Boolean!
+
+  """
+  Returns true if blank issue creation is allowed
+  """
+  isBlankIssuesEnabled: Boolean!
+
+  """
+  Returns whether or not this repository disabled.
+  """
+  isDisabled: Boolean!
+
+  """
+  Returns whether or not this repository is empty.
+  """
+  isEmpty: Boolean!
+
+  """
+  Identifies if the repository is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Indicates if a repository is either owned by an organization, or is a private fork of an organization repository.
+  """
+  isInOrganization: Boolean!
+
+  """
+  Indicates if the repository has been locked or not.
+  """
+  isLocked: Boolean!
+
+  """
+  Identifies if the repository is a mirror.
+  """
+  isMirror: Boolean!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  Returns true if this repository has a security policy
+  """
+  isSecurityPolicyEnabled: Boolean
+
+  """
+  Identifies if the repository is a template that can be used to generate new repositories.
+  """
+  isTemplate: Boolean!
+
+  """
+  Is this repository a user configuration repository?
+  """
+  isUserConfigurationRepository: Boolean!
+
+  """
+  Returns a single issue from the current repository by number.
+  """
+  issue(
+    """
+    The number for the issue to be returned.
+    """
+    number: Int!
+  ): Issue
+
+  """
+  Returns a single issue-like object from the current repository by number.
+  """
+  issueOrPullRequest(
+    """
+    The number for the issue to be returned.
+    """
+    number: Int!
+  ): IssueOrPullRequest
+
+  """
+  Returns a list of issue templates associated to the repository
+  """
+  issueTemplates: [IssueTemplate!]
+
+  """
+  A list of issues that have been opened in the repository.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Returns a single label by name
+  """
+  label(
+    """
+    Label name
+    """
+    name: String!
+  ): Label
+
+  """
+  A list of labels associated with the repository.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+
+    """
+    If provided, searches labels by name and description.
+    """
+    query: String
+  ): LabelConnection
+
+  """
+  A list containing a breakdown of the language composition of the repository.
+  """
+  languages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: LanguageOrder
+  ): LanguageConnection
+
+  """
+  Get the latest release for the repository if one exists.
+  """
+  latestRelease: Release
+
+  """
+  The license associated with the repository
+  """
+  licenseInfo: License
+
+  """
+  The reason the repository has been locked.
+  """
+  lockReason: RepositoryLockReason
+
+  """
+  A list of Users that can be mentioned in the context of the repository.
+  """
+  mentionableUsers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters users with query on user name and login
+    """
+    query: String
+  ): UserConnection!
+
+  """
+  Whether or not PRs are merged with a merge commit on this repository.
+  """
+  mergeCommitAllowed: Boolean!
+
+  """
+  How the default commit message will be generated when merging a pull request.
+  """
+  mergeCommitMessage: MergeCommitMessage!
+
+  """
+  How the default commit title will be generated when merging a pull request.
+  """
+  mergeCommitTitle: MergeCommitTitle!
+
+  """
+  The merge queue for a specified branch, otherwise the default branch if not provided.
+  """
+  mergeQueue(
+    """
+    The name of the branch to get the merge queue for. Case sensitive.
+    """
+    branch: String
+  ): MergeQueue
+
+  """
+  Returns a single milestone from the current repository by number.
+  """
+  milestone(
+    """
+    The number for the milestone to be returned.
+    """
+    number: Int!
+  ): Milestone
+
+  """
+  A list of milestones associated with the repository.
+  """
+  milestones(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for milestones.
+    """
+    orderBy: MilestoneOrder
+
+    """
+    Filters milestones with a query on the title
+    """
+    query: String
+
+    """
+    Filter by the state of the milestones.
+    """
+    states: [MilestoneState!]
+  ): MilestoneConnection
+
+  """
+  The repository's original mirror URL.
+  """
+  mirrorUrl: URI
+
+  """
+  The name of the repository.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+
+  """
+  A Git object in the repository
+  """
+  object(
+    """
+    A Git revision expression suitable for rev-parse
+    """
+    expression: String
+
+    """
+    The Git object ID
+    """
+    oid: GitObjectID
+  ): GitObject
+
+  """
+  The image used to represent this repository in Open Graph data.
+  """
+  openGraphImageUrl: URI!
+
+  """
+  The User owner of the repository.
+  """
+  owner: RepositoryOwner!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  The repository parent, if this is a fork.
+  """
+  parent: Repository
+
+  """
+  A list of discussions that have been pinned in this repository.
+  """
+  pinnedDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnedDiscussionConnection!
+
+  """
+  A list of pinned issues for this repository.
+  """
+  pinnedIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnedIssueConnection
+
+  """
+  The primary language of the repository's code.
+  """
+  primaryLanguage: Language
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Finds and returns the Project according to the provided Project number.
+  """
+  projectV2(
+    """
+    The Project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing the repository's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing the repository's projects
+  """
+  projectsUrl: URI!
+
+  """
+  List of projects linked to this repository.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for linked to the repo.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Returns a single pull request from the current repository by number.
+  """
+  pullRequest(
+    """
+    The number for the pull request to be returned.
+    """
+    number: Int!
+  ): PullRequest
+
+  """
+  Returns a list of pull request templates associated to the repository
+  """
+  pullRequestTemplates: [PullRequestTemplate!]
+
+  """
+  A list of pull requests that have been opened in the repository.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Identifies the date and time when the repository was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  Whether or not rebase-merging is enabled on this repository.
+  """
+  rebaseMergeAllowed: Boolean!
+
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  Fetch a given ref from the repository
+  """
+  ref(
+    """
+    The ref to retrieve. Fully qualified matches are checked in order
+    (`refs/heads/master`) before falling back onto checks for short name matches (`master`).
+    """
+    qualifiedName: String!
+  ): Ref
+
+  """
+  Fetch a list of refs from the repository
+  """
+  refs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    DEPRECATED: use orderBy. The ordering direction.
+    """
+    direction: OrderDirection
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for refs returned from the connection.
+    """
+    orderBy: RefOrder
+
+    """
+    Filters refs with query on name
+    """
+    query: String
+
+    """
+    A ref name prefix like `refs/heads/`, `refs/tags/`, etc.
+    """
+    refPrefix: String!
+  ): RefConnection
+
+  """
+  Lookup a single release given various criteria.
+  """
+  release(
+    """
+    The name of the Tag the Release was created from
+    """
+    tagName: String!
+  ): Release
+
+  """
+  List of releases which are dependent on this repository.
+  """
+  releases(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: ReleaseOrder
+  ): ReleaseConnection!
+
+  """
+  A list of applied repository-topic associations for this repository.
+  """
+  repositoryTopics(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryTopicConnection!
+
+  """
+  The HTTP path for this repository
+  """
+  resourcePath: URI!
+
+  """
+  Returns a single ruleset from the current repository by ID.
+  """
+  ruleset(
+    """
+    The ID of the ruleset to be returned.
+    """
+    databaseId: Int!
+
+    """
+    Include rulesets configured at higher levels that apply to this repository
+    """
+    includeParents: Boolean = true
+  ): RepositoryRuleset
+
+  """
+  A list of rulesets for this repository.
+  """
+  rulesets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Return rulesets configured at higher levels that apply to this repository
+    """
+    includeParents: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetConnection
+
+  """
+  The security policy URL.
+  """
+  securityPolicyUrl: URI
+
+  """
+  A description of the repository, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML!
+
+  """
+  Whether or not squash-merging is enabled on this repository.
+  """
+  squashMergeAllowed: Boolean!
+
+  """
+  How the default commit message will be generated when squash merging a pull request.
+  """
+  squashMergeCommitMessage: SquashMergeCommitMessage!
+
+  """
+  How the default commit title will be generated when squash merging a pull request.
+  """
+  squashMergeCommitTitle: SquashMergeCommitTitle!
+
+  """
+  Whether a squash merge commit can use the pull request title as default.
+  """
+  squashPrTitleUsedAsDefault: Boolean!
+    @deprecated(
+      reason: "`squashPrTitleUsedAsDefault` will be removed. Use `Repository.squashMergeCommitTitle` instead. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The SSH URL to clone this repository
+  """
+  sshUrl: GitSSHRemote!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a list of all submodules in this repository parsed from the
+  .gitmodules file as of the default branch's HEAD commit.
+  """
+  submodules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SubmoduleConnection!
+
+  """
+  Temporary authentication token for cloning this repository.
+  """
+  tempCloneToken: String
+
+  """
+  The repository from which this repository was generated, if any.
+  """
+  templateRepository: Repository
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this repository
+  """
+  url: URI!
+
+  """
+  Whether this repository has a custom image to use with Open Graph as opposed to being represented by the owner's avatar.
+  """
+  usesCustomOpenGraphImage: Boolean!
+
+  """
+  Indicates whether the viewer has admin permissions on this repository.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Indicates whether the viewer can update the topics of this repository.
+  """
+  viewerCanUpdateTopics: Boolean!
+
+  """
+  The last commit email for the viewer.
+  """
+  viewerDefaultCommitEmail: String
+
+  """
+  The last used merge method by the viewer or the default for the repository.
+  """
+  viewerDefaultMergeMethod: PullRequestMergeMethod!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+
+  """
+  The users permission level on the repository. Will return null if authenticated as an GitHub App.
+  """
+  viewerPermission: RepositoryPermission
+
+  """
+  A list of emails this viewer can commit with.
+  """
+  viewerPossibleCommitEmails: [String!]
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+
+  """
+  Returns a single vulnerability alert from the current repository by number.
+  """
+  vulnerabilityAlert(
+    """
+    The number for the vulnerability alert to be returned.
+    """
+    number: Int!
+  ): RepositoryVulnerabilityAlert
+
+  """
+  A list of vulnerability alerts that are on this repository.
+  """
+  vulnerabilityAlerts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filter by the scope of the alert's dependency
+    """
+    dependencyScopes: [RepositoryVulnerabilityAlertDependencyScope!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter by the state of the alert
+    """
+    states: [RepositoryVulnerabilityAlertState!]
+  ): RepositoryVulnerabilityAlertConnection
+
+  """
+  A list of users watching the repository.
+  """
+  watchers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  Whether contributors are required to sign off on web-based commits in this repository.
+  """
+  webCommitSignoffRequired: Boolean!
+}
+
+"""
+The affiliation of a user to a repository
+"""
+enum RepositoryAffiliation {
+  """
+  Repositories that the user has been added to as a collaborator.
+  """
+  COLLABORATOR
+
+  """
+  Repositories that the user has access to through being a member of an
+  organization. This includes every repository on every team that the user is on.
+  """
+  ORGANIZATION_MEMBER
+
+  """
+  Repositories that are owned by the authenticated user.
+  """
+  OWNER
+}
+
+"""
+Metadata for an audit entry with action repo.*
+"""
+interface RepositoryAuditEntryData {
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+}
+
+"""
+Information extracted from a repository's `CODEOWNERS` file.
+"""
+type RepositoryCodeowners {
+  """
+  Any problems that were encountered while parsing the `CODEOWNERS` file.
+  """
+  errors: [RepositoryCodeownersError!]!
+}
+
+"""
+An error in a `CODEOWNERS` file.
+"""
+type RepositoryCodeownersError {
+  """
+  The column number where the error occurs.
+  """
+  column: Int!
+
+  """
+  A short string describing the type of error.
+  """
+  kind: String!
+
+  """
+  The line number where the error occurs.
+  """
+  line: Int!
+
+  """
+  A complete description of the error, combining information from other fields.
+  """
+  message: String!
+
+  """
+  The path to the file when the error occurs.
+  """
+  path: String!
+
+  """
+  The content of the line where the error occurs.
+  """
+  source: String!
+
+  """
+  A suggestion of how to fix the error.
+  """
+  suggestion: String
+}
+
+"""
+The connection type for User.
+"""
+type RepositoryCollaboratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryCollaboratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user who is a collaborator of a repository.
+"""
+type RepositoryCollaboratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: User!
+
+  """
+  The permission the user has on the repository.
+  """
+  permission: RepositoryPermission!
+
+  """
+  A list of sources for the user's access to the repository.
+  """
+  permissionSources: [PermissionSource!]
+}
+
+"""
+A list of repositories owned by the subject.
+"""
+type RepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total size in kilobytes of all repositories in the connection. Value will
+  never be larger than max 32-bit signed integer.
+  """
+  totalDiskUsage: Int!
+}
+
+"""
+A repository contact link.
+"""
+type RepositoryContactLink {
+  """
+  The contact link purpose.
+  """
+  about: String!
+
+  """
+  The contact link name.
+  """
+  name: String!
+
+  """
+  The contact link URL.
+  """
+  url: URI!
+}
+
+"""
+The reason a repository is listed as 'contributed'.
+"""
+enum RepositoryContributionType {
+  """
+  Created a commit
+  """
+  COMMIT
+
+  """
+  Created an issue
+  """
+  ISSUE
+
+  """
+  Created a pull request
+  """
+  PULL_REQUEST
+
+  """
+  Reviewed a pull request
+  """
+  PULL_REQUEST_REVIEW
+
+  """
+  Created the repository
+  """
+  REPOSITORY
+}
+
+"""
+Represents an author of discussions in repositories.
+"""
+interface RepositoryDiscussionAuthor {
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only those that have been answered or not. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+}
+
+"""
+Represents an author of discussion comments in repositories.
+"""
+interface RepositoryDiscussionCommentAuthor {
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Repository
+}
+
+"""
+Parameters to be used for the repository_id condition
+"""
+type RepositoryIdConditionTarget {
+  """
+  One of these repo IDs must match the repo.
+  """
+  repositoryIds: [ID!]!
+}
+
+"""
+Parameters to be used for the repository_id condition
+"""
+input RepositoryIdConditionTargetInput {
+  """
+  One of these repo IDs must match the repo.
+  """
+  repositoryIds: [ID!]!
+}
+
+"""
+A subset of repository info.
+"""
+interface RepositoryInfo {
+  """
+  Identifies the date and time when the repository was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The description of the repository.
+  """
+  description: String
+
+  """
+  The description of the repository rendered to HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  Returns how many forks there are of this repository in the whole network.
+  """
+  forkCount: Int!
+
+  """
+  Indicates if the repository has the Discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean!
+
+  """
+  Indicates if the repository has issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean!
+
+  """
+  Indicates if the repository has the Projects feature enabled.
+  """
+  hasProjectsEnabled: Boolean!
+
+  """
+  Indicates if the repository has wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean!
+
+  """
+  The repository's URL.
+  """
+  homepageUrl: URI
+
+  """
+  Indicates if the repository is unmaintained.
+  """
+  isArchived: Boolean!
+
+  """
+  Identifies if the repository is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Indicates if a repository is either owned by an organization, or is a private fork of an organization repository.
+  """
+  isInOrganization: Boolean!
+
+  """
+  Indicates if the repository has been locked or not.
+  """
+  isLocked: Boolean!
+
+  """
+  Identifies if the repository is a mirror.
+  """
+  isMirror: Boolean!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  Identifies if the repository is a template that can be used to generate new repositories.
+  """
+  isTemplate: Boolean!
+
+  """
+  The license associated with the repository
+  """
+  licenseInfo: License
+
+  """
+  The reason the repository has been locked.
+  """
+  lockReason: RepositoryLockReason
+
+  """
+  The repository's original mirror URL.
+  """
+  mirrorUrl: URI
+
+  """
+  The name of the repository.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+
+  """
+  The image used to represent this repository in Open Graph data.
+  """
+  openGraphImageUrl: URI!
+
+  """
+  The User owner of the repository.
+  """
+  owner: RepositoryOwner!
+
+  """
+  Identifies the date and time when the repository was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  The HTTP path for this repository
+  """
+  resourcePath: URI!
+
+  """
+  A description of the repository, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this repository
+  """
+  url: URI!
+
+  """
+  Whether this repository has a custom image to use with Open Graph as opposed to being represented by the owner's avatar.
+  """
+  usesCustomOpenGraphImage: Boolean!
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Repository interaction limit that applies to this object.
+"""
+type RepositoryInteractionAbility {
+  """
+  The time the currently active limit expires.
+  """
+  expiresAt: DateTime
+
+  """
+  The current limit that is enabled on this object.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The origin of the currently active interaction limit.
+  """
+  origin: RepositoryInteractionLimitOrigin!
+}
+
+"""
+A repository interaction limit.
+"""
+enum RepositoryInteractionLimit {
+  """
+  Users that are not collaborators will not be able to interact with the repository.
+  """
+  COLLABORATORS_ONLY
+
+  """
+  Users that have not previously committed to a repository’s default branch will be unable to interact with the repository.
+  """
+  CONTRIBUTORS_ONLY
+
+  """
+  Users that have recently created their account will be unable to interact with the repository.
+  """
+  EXISTING_USERS
+
+  """
+  No interaction limits are enabled.
+  """
+  NO_LIMIT
+}
+
+"""
+The length for a repository interaction limit to be enabled for.
+"""
+enum RepositoryInteractionLimitExpiry {
+  """
+  The interaction limit will expire after 1 day.
+  """
+  ONE_DAY
+
+  """
+  The interaction limit will expire after 1 month.
+  """
+  ONE_MONTH
+
+  """
+  The interaction limit will expire after 1 week.
+  """
+  ONE_WEEK
+
+  """
+  The interaction limit will expire after 6 months.
+  """
+  SIX_MONTHS
+
+  """
+  The interaction limit will expire after 3 days.
+  """
+  THREE_DAYS
+}
+
+"""
+Indicates where an interaction limit is configured.
+"""
+enum RepositoryInteractionLimitOrigin {
+  """
+  A limit that is configured at the organization level.
+  """
+  ORGANIZATION
+
+  """
+  A limit that is configured at the repository level.
+  """
+  REPOSITORY
+
+  """
+  A limit that is configured at the user-wide level.
+  """
+  USER
+}
+
+"""
+An invitation for a user to be added to a repository.
+"""
+type RepositoryInvitation implements Node {
+  """
+  The email address that received the invitation.
+  """
+  email: String
+
+  """
+  The Node ID of the RepositoryInvitation object
+  """
+  id: ID!
+
+  """
+  The user who received the invitation.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User!
+
+  """
+  The permalink for this repository invitation.
+  """
+  permalink: URI!
+
+  """
+  The permission granted on this repository by this invitation.
+  """
+  permission: RepositoryPermission!
+
+  """
+  The Repository the user is invited to.
+  """
+  repository: RepositoryInfo
+}
+
+"""
+A list of repository invitations.
+"""
+type RepositoryInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryInvitation
+}
+
+"""
+Ordering options for repository invitation connections.
+"""
+input RepositoryInvitationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repository invitations by.
+  """
+  field: RepositoryInvitationOrderField!
+}
+
+"""
+Properties by which repository invitation connections can be ordered.
+"""
+enum RepositoryInvitationOrderField {
+  """
+  Order repository invitations by creation time
+  """
+  CREATED_AT
+}
+
+"""
+The possible reasons a given repository could be in a locked state.
+"""
+enum RepositoryLockReason {
+  """
+  The repository is locked due to a billing related reason.
+  """
+  BILLING
+
+  """
+  The repository is locked due to a migration.
+  """
+  MIGRATING
+
+  """
+  The repository is locked due to a move.
+  """
+  MOVING
+
+  """
+  The repository is locked due to a rename.
+  """
+  RENAME
+
+  """
+  The repository is locked due to a trade controls related reason.
+  """
+  TRADE_RESTRICTION
+
+  """
+  The repository is locked due to an ownership transfer.
+  """
+  TRANSFERRING_OWNERSHIP
+}
+
+"""
+A GitHub Enterprise Importer (GEI) repository migration.
+"""
+type RepositoryMigration implements Migration & Node {
+  """
+  The migration flag to continue on error.
+  """
+  continueOnError: Boolean!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the RepositoryMigration object
+  """
+  id: ID!
+
+  """
+  The URL for the migration log (expires 1 day after migration completes).
+  """
+  migrationLogUrl: URI
+
+  """
+  The migration source.
+  """
+  migrationSource: MigrationSource!
+
+  """
+  The target repository name.
+  """
+  repositoryName: String!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  sourceUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: MigrationState!
+
+  """
+  The number of warnings encountered for this migration. To review the warnings,
+  check the [Migration Log](https://docs.github.com/en/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/accessing-your-migration-logs-for-github-enterprise-importer).
+  """
+  warningsCount: Int!
+}
+
+"""
+The connection type for RepositoryMigration.
+"""
+type RepositoryMigrationConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryMigrationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryMigration]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a repository migration.
+"""
+type RepositoryMigrationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryMigration
+}
+
+"""
+Ordering options for repository migrations.
+"""
+input RepositoryMigrationOrder {
+  """
+  The ordering direction.
+  """
+  direction: RepositoryMigrationOrderDirection!
+
+  """
+  The field to order repository migrations by.
+  """
+  field: RepositoryMigrationOrderField!
+}
+
+"""
+Possible directions in which to order a list of repository migrations when provided an `orderBy` argument.
+"""
+enum RepositoryMigrationOrderDirection {
+  """
+  Specifies an ascending order for a given `orderBy` argument.
+  """
+  ASC
+
+  """
+  Specifies a descending order for a given `orderBy` argument.
+  """
+  DESC
+}
+
+"""
+Properties by which repository migrations can be ordered.
+"""
+enum RepositoryMigrationOrderField {
+  """
+  Order mannequins why when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Parameters to be used for the repository_name condition
+"""
+type RepositoryNameConditionTarget {
+  """
+  Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of repository names or patterns to include. One of these patterns must
+  match for the condition to pass. Also accepts `~ALL` to include all repositories.
+  """
+  include: [String!]!
+
+  """
+  Target changes that match these patterns will be prevented except by those with bypass permissions.
+  """
+  protected: Boolean!
+}
+
+"""
+Parameters to be used for the repository_name condition
+"""
+input RepositoryNameConditionTargetInput {
+  """
+  Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of repository names or patterns to include. One of these patterns must
+  match for the condition to pass. Also accepts `~ALL` to include all repositories.
+  """
+  include: [String!]!
+
+  """
+  Target changes that match these patterns will be prevented except by those with bypass permissions.
+  """
+  protected: Boolean
+}
+
+"""
+Represents a object that belongs to a repository.
+"""
+interface RepositoryNode {
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+Ordering options for repository connections
+"""
+input RepositoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: RepositoryOrderField!
+}
+
+"""
+Properties by which repository connections can be ordered.
+"""
+enum RepositoryOrderField {
+  """
+  Order repositories by creation time
+  """
+  CREATED_AT
+
+  """
+  Order repositories by name
+  """
+  NAME
+
+  """
+  Order repositories by push time
+  """
+  PUSHED_AT
+
+  """
+  Order repositories by number of stargazers
+  """
+  STARGAZERS
+
+  """
+  Order repositories by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Represents an owner of a Repository.
+"""
+interface RepositoryOwner {
+  """
+  A URL pointing to the owner's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The Node ID of the RepositoryOwner object
+  """
+  id: ID!
+
+  """
+  The username used to login.
+  """
+  login: String!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  The HTTP URL for the owner.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for the owner.
+  """
+  url: URI!
+}
+
+"""
+The access level to a repository
+"""
+enum RepositoryPermission {
+  """
+  Can read, clone, and push to this repository. Can also manage issues, pull
+  requests, and repository settings, including adding collaborators
+  """
+  ADMIN
+
+  """
+  Can read, clone, and push to this repository. They can also manage issues, pull requests, and some repository settings
+  """
+  MAINTAIN
+
+  """
+  Can read and clone this repository. Can also open and comment on issues and pull requests
+  """
+  READ
+
+  """
+  Can read and clone this repository. Can also manage issues and pull requests
+  """
+  TRIAGE
+
+  """
+  Can read, clone, and push to this repository. Can also manage issues and pull requests
+  """
+  WRITE
+}
+
+"""
+The privacy of a repository
+"""
+enum RepositoryPrivacy {
+  """
+  Private
+  """
+  PRIVATE
+
+  """
+  Public
+  """
+  PUBLIC
+}
+
+"""
+A repository rule.
+"""
+type RepositoryRule implements Node {
+  """
+  The Node ID of the RepositoryRule object
+  """
+  id: ID!
+
+  """
+  The parameters for this rule.
+  """
+  parameters: RuleParameters
+
+  """
+  The repository ruleset associated with this rule configuration
+  """
+  repositoryRuleset: RepositoryRuleset
+
+  """
+  The type of rule.
+  """
+  type: RepositoryRuleType!
+}
+
+"""
+Set of conditions that determine if a ruleset will evaluate
+"""
+type RepositoryRuleConditions {
+  """
+  Configuration for the ref_name condition
+  """
+  refName: RefNameConditionTarget
+
+  """
+  Configuration for the repository_id condition
+  """
+  repositoryId: RepositoryIdConditionTarget
+
+  """
+  Configuration for the repository_name condition
+  """
+  repositoryName: RepositoryNameConditionTarget
+}
+
+"""
+Specifies the conditions required for a ruleset to evaluate
+"""
+input RepositoryRuleConditionsInput {
+  """
+  Configuration for the ref_name condition
+  """
+  refName: RefNameConditionTargetInput
+
+  """
+  Configuration for the repository_id condition
+  """
+  repositoryId: RepositoryIdConditionTargetInput
+
+  """
+  Configuration for the repository_name condition
+  """
+  repositoryName: RepositoryNameConditionTargetInput
+}
+
+"""
+The connection type for RepositoryRule.
+"""
+type RepositoryRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRule
+}
+
+"""
+Specifies the attributes for a new or updated rule.
+"""
+input RepositoryRuleInput {
+  """
+  Optional ID of this rule when updating
+  """
+  id: ID @possibleTypes(concreteTypes: ["RepositoryRule"])
+
+  """
+  The parameters for the rule.
+  """
+  parameters: RuleParametersInput
+
+  """
+  The type of rule to create.
+  """
+  type: RepositoryRuleType!
+}
+
+"""
+The rule types supported in rulesets
+"""
+enum RepositoryRuleType {
+  """
+  Authorization
+  """
+  AUTHORIZATION
+
+  """
+  Branch name pattern
+  """
+  BRANCH_NAME_PATTERN
+
+  """
+  Committer email pattern
+  """
+  COMMITTER_EMAIL_PATTERN
+
+  """
+  Commit author email pattern
+  """
+  COMMIT_AUTHOR_EMAIL_PATTERN
+
+  """
+  Commit message pattern
+  """
+  COMMIT_MESSAGE_PATTERN
+
+  """
+  Only allow users with bypass permission to create matching refs.
+  """
+  CREATION
+
+  """
+  Only allow users with bypass permissions to delete matching refs.
+  """
+  DELETION
+
+  """
+  Branch is read-only. Users cannot push to the branch.
+  """
+  LOCK_BRANCH
+
+  """
+  Max ref updates
+  """
+  MAX_REF_UPDATES
+
+  """
+  Merges must be performed via a merge queue.
+  """
+  MERGE_QUEUE
+
+  """
+  Merge queue locked ref
+  """
+  MERGE_QUEUE_LOCKED_REF
+
+  """
+  Prevent users with push access from force pushing to refs.
+  """
+  NON_FAST_FORWARD
+
+  """
+  Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+  """
+  PULL_REQUEST
+
+  """
+  Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+  """
+  REQUIRED_DEPLOYMENTS
+
+  """
+  Prevent merge commits from being pushed to matching refs.
+  """
+  REQUIRED_LINEAR_HISTORY
+
+  """
+  When enabled, all conversations on code must be resolved before a pull request
+  can be merged into a branch that matches this rule.
+  """
+  REQUIRED_REVIEW_THREAD_RESOLUTION
+
+  """
+  Commits pushed to matching refs must have verified signatures.
+  """
+  REQUIRED_SIGNATURES
+
+  """
+  Choose which status checks must pass before the ref is updated. When enabled,
+  commits must first be pushed to another ref where the checks pass.
+  """
+  REQUIRED_STATUS_CHECKS
+
+  """
+  Require all commits be made to a non-target branch and submitted via a pull
+  request and required workflow checks to pass before they can be merged.
+  """
+  REQUIRED_WORKFLOW_STATUS_CHECKS
+
+  """
+  Commits pushed to matching refs must have verified signatures.
+  """
+  RULESET_REQUIRED_SIGNATURES
+
+  """
+  Secret scanning
+  """
+  SECRET_SCANNING
+
+  """
+  Tag
+  """
+  TAG
+
+  """
+  Tag name pattern
+  """
+  TAG_NAME_PATTERN
+
+  """
+  Only allow users with bypass permission to update matching refs.
+  """
+  UPDATE
+
+  """
+  Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+  """
+  WORKFLOWS
+
+  """
+  Workflow files cannot be modified.
+  """
+  WORKFLOW_UPDATES
+}
+
+"""
+A repository ruleset.
+"""
+type RepositoryRuleset implements Node {
+  """
+  The actors that can bypass this ruleset
+  """
+  bypassActors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetBypassActorConnection
+
+  """
+  The set of conditions that must evaluate to true for this ruleset to apply
+  """
+  conditions: RepositoryRuleConditions!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The enforcement level of this ruleset
+  """
+  enforcement: RuleEnforcement!
+
+  """
+  The Node ID of the RepositoryRuleset object
+  """
+  id: ID!
+
+  """
+  Name of the ruleset.
+  """
+  name: String!
+
+  """
+  List of rules.
+  """
+  rules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The type of rule.
+    """
+    type: RepositoryRuleType
+  ): RepositoryRuleConnection
+
+  """
+  Source of ruleset.
+  """
+  source: RuleSource!
+
+  """
+  Target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+A team or app that has the ability to bypass a rules defined on a ruleset
+"""
+type RepositoryRulesetBypassActor implements Node {
+  """
+  The actor that can bypass rules.
+  """
+  actor: BypassActor
+
+  """
+  The mode for the bypass actor
+  """
+  bypassMode: RepositoryRulesetBypassActorBypassMode
+
+  """
+  The Node ID of the RepositoryRulesetBypassActor object
+  """
+  id: ID!
+
+  """
+  This actor represents the ability for an organization owner to bypass
+  """
+  organizationAdmin: Boolean!
+
+  """
+  If the actor is a repository role, the repository role's ID that can bypass
+  """
+  repositoryRoleDatabaseId: Int
+
+  """
+  If the actor is a repository role, the repository role's name that can bypass
+  """
+  repositoryRoleName: String
+
+  """
+  Identifies the ruleset associated with the allowed actor
+  """
+  repositoryRuleset: RepositoryRuleset
+}
+
+"""
+The bypass mode for a specific actor on a ruleset.
+"""
+enum RepositoryRulesetBypassActorBypassMode {
+  """
+  The actor can always bypass rules
+  """
+  ALWAYS
+
+  """
+  The actor can only bypass rules via a pull request
+  """
+  PULL_REQUEST
+}
+
+"""
+The connection type for RepositoryRulesetBypassActor.
+"""
+type RepositoryRulesetBypassActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRulesetBypassActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRulesetBypassActor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRulesetBypassActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRulesetBypassActor
+}
+
+"""
+Specifies the attributes for a new or updated ruleset bypass actor. Only one of
+`actor_id`, `repository_role_database_id`, or `organization_admin` should be specified.
+"""
+input RepositoryRulesetBypassActorInput {
+  """
+  For Team and Integration bypasses, the Team or Integration ID
+  """
+  actorId: ID
+
+  """
+  The bypass mode for this actor.
+  """
+  bypassMode: RepositoryRulesetBypassActorBypassMode!
+
+  """
+  For organization owner bypasses, true
+  """
+  organizationAdmin: Boolean
+
+  """
+  For role bypasses, the role database ID
+  """
+  repositoryRoleDatabaseId: Int
+}
+
+"""
+The connection type for RepositoryRuleset.
+"""
+type RepositoryRulesetConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRulesetEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRuleset]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRulesetEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRuleset
+}
+
+"""
+The targets supported for rulesets
+"""
+enum RepositoryRulesetTarget {
+  """
+  Branch
+  """
+  BRANCH
+
+  """
+  Tag
+  """
+  TAG
+}
+
+"""
+A repository-topic connects a repository to a topic.
+"""
+type RepositoryTopic implements Node & UniformResourceLocatable {
+  """
+  The Node ID of the RepositoryTopic object
+  """
+  id: ID!
+
+  """
+  The HTTP path for this repository-topic.
+  """
+  resourcePath: URI!
+
+  """
+  The topic.
+  """
+  topic: Topic!
+
+  """
+  The HTTP URL for this repository-topic.
+  """
+  url: URI!
+}
+
+"""
+The connection type for RepositoryTopic.
+"""
+type RepositoryTopicConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryTopicEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryTopic]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryTopicEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryTopic
+}
+
+"""
+The repository's visibility level.
+"""
+enum RepositoryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repository_visibility_change.disable event.
+"""
+type RepositoryVisibilityChangeDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the RepositoryVisibilityChangeDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repository_visibility_change.enable event.
+"""
+type RepositoryVisibilityChangeEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the RepositoryVisibilityChangeEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+A Dependabot alert for a repository with a dependency affected by a security vulnerability.
+"""
+type RepositoryVulnerabilityAlert implements Node & RepositoryNode {
+  """
+  When was the alert auto-dismissed?
+  """
+  autoDismissedAt: DateTime
+
+  """
+  When was the alert created?
+  """
+  createdAt: DateTime!
+
+  """
+  The associated Dependabot update
+  """
+  dependabotUpdate: DependabotUpdate
+
+  """
+  The scope of an alert's dependency
+  """
+  dependencyScope: RepositoryVulnerabilityAlertDependencyScope
+
+  """
+  Comment explaining the reason the alert was dismissed
+  """
+  dismissComment: String
+
+  """
+  The reason the alert was dismissed
+  """
+  dismissReason: String
+
+  """
+  When was the alert dismissed?
+  """
+  dismissedAt: DateTime
+
+  """
+  The user who dismissed the alert
+  """
+  dismisser: User
+
+  """
+  When was the alert fixed?
+  """
+  fixedAt: DateTime
+
+  """
+  The Node ID of the RepositoryVulnerabilityAlert object
+  """
+  id: ID!
+
+  """
+  Identifies the alert number.
+  """
+  number: Int!
+
+  """
+  The associated repository
+  """
+  repository: Repository!
+
+  """
+  The associated security advisory
+  """
+  securityAdvisory: SecurityAdvisory
+
+  """
+  The associated security vulnerability
+  """
+  securityVulnerability: SecurityVulnerability
+
+  """
+  Identifies the state of the alert.
+  """
+  state: RepositoryVulnerabilityAlertState!
+
+  """
+  The vulnerable manifest filename
+  """
+  vulnerableManifestFilename: String!
+
+  """
+  The vulnerable manifest path
+  """
+  vulnerableManifestPath: String!
+
+  """
+  The vulnerable requirements
+  """
+  vulnerableRequirements: String
+}
+
+"""
+The connection type for RepositoryVulnerabilityAlert.
+"""
+type RepositoryVulnerabilityAlertConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryVulnerabilityAlertEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryVulnerabilityAlert]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible scopes of an alert's dependency.
+"""
+enum RepositoryVulnerabilityAlertDependencyScope {
+  """
+  A dependency that is only used in development
+  """
+  DEVELOPMENT
+
+  """
+  A dependency that is leveraged during application runtime
+  """
+  RUNTIME
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryVulnerabilityAlertEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryVulnerabilityAlert
+}
+
+"""
+The possible states of an alert
+"""
+enum RepositoryVulnerabilityAlertState {
+  """
+  An alert that has been automatically closed by Dependabot.
+  """
+  AUTO_DISMISSED
+
+  """
+  An alert that has been manually closed by a user.
+  """
+  DISMISSED
+
+  """
+  An alert that has been resolved by a code change.
+  """
+  FIXED
+
+  """
+  An alert that is still open.
+  """
+  OPEN
+}
+
+"""
+Autogenerated input type of RequestReviews
+"""
+input RequestReviewsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request to modify.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node IDs of the team to request.
+  """
+  teamIds: [ID!] @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Add users to the set rather than replace.
+  """
+  union: Boolean = false
+
+  """
+  The Node IDs of the user to request.
+  """
+  userIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RequestReviews
+"""
+type RequestReviewsPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that is getting requests.
+  """
+  pullRequest: PullRequest
+
+  """
+  The edge from the pull request to the requested reviewers.
+  """
+  requestedReviewersEdge: UserEdge
+}
+
+"""
+The possible states that can be requested when creating a check run.
+"""
+enum RequestableCheckStatusState {
+  """
+  The check suite or run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check suite or run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check suite or run is in pending state.
+  """
+  PENDING
+
+  """
+  The check suite or run has been queued.
+  """
+  QUEUED
+
+  """
+  The check suite or run is in waiting state.
+  """
+  WAITING
+}
+
+"""
+Types that can be requested reviewers.
+"""
+union RequestedReviewer = Bot | Mannequin | Team | User
+
+"""
+The connection type for RequestedReviewer.
+"""
+type RequestedReviewerConnection {
+  """
+  A list of edges.
+  """
+  edges: [RequestedReviewerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RequestedReviewer]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RequestedReviewerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RequestedReviewer
+}
+
+"""
+Represents a type that can be required by a pull request for merging.
+"""
+interface RequirableByPullRequest {
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+}
+
+"""
+Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+"""
+type RequiredDeploymentsParameters {
+  """
+  The environments that must be successfully deployed to before branches can be merged.
+  """
+  requiredDeploymentEnvironments: [String!]!
+}
+
+"""
+Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+"""
+input RequiredDeploymentsParametersInput {
+  """
+  The environments that must be successfully deployed to before branches can be merged.
+  """
+  requiredDeploymentEnvironments: [String!]!
+}
+
+"""
+Represents a required status check for a protected branch, but not any specific run of that check.
+"""
+type RequiredStatusCheckDescription {
+  """
+  The App that must provide this status in order for it to be accepted.
+  """
+  app: App
+
+  """
+  The name of this status.
+  """
+  context: String!
+}
+
+"""
+Specifies the attributes for a new or updated required status check.
+"""
+input RequiredStatusCheckInput {
+  """
+  The ID of the App that must set the status in order for it to be accepted.
+  Omit this value to use whichever app has recently been setting this status, or
+  use "any" to allow any app to set the status.
+  """
+  appId: ID
+
+  """
+  Status check context that must pass for commits to be accepted to the matching branch.
+  """
+  context: String!
+}
+
+"""
+Choose which status checks must pass before the ref is updated. When enabled,
+commits must first be pushed to another ref where the checks pass.
+"""
+type RequiredStatusChecksParameters {
+  """
+  Status checks that are required.
+  """
+  requiredStatusChecks: [StatusCheckConfiguration!]!
+
+  """
+  Whether pull requests targeting a matching branch must be tested with the
+  latest code. This setting will not take effect unless at least one status
+  check is enabled.
+  """
+  strictRequiredStatusChecksPolicy: Boolean!
+}
+
+"""
+Choose which status checks must pass before the ref is updated. When enabled,
+commits must first be pushed to another ref where the checks pass.
+"""
+input RequiredStatusChecksParametersInput {
+  """
+  Status checks that are required.
+  """
+  requiredStatusChecks: [StatusCheckConfigurationInput!]!
+
+  """
+  Whether pull requests targeting a matching branch must be tested with the
+  latest code. This setting will not take effect unless at least one status
+  check is enabled.
+  """
+  strictRequiredStatusChecksPolicy: Boolean!
+}
+
+"""
+Autogenerated input type of RerequestCheckSuite
+"""
+input RerequestCheckSuiteInput {
+  """
+  The Node ID of the check suite.
+  """
+  checkSuiteId: ID! @possibleTypes(concreteTypes: ["CheckSuite"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of RerequestCheckSuite
+"""
+type RerequestCheckSuitePayload {
+  """
+  The requested check suite.
+  """
+  checkSuite: CheckSuite
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of ResolveReviewThread
+"""
+input ResolveReviewThreadInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the thread to resolve
+  """
+  threadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of ResolveReviewThread
+"""
+type ResolveReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The thread to resolve.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Represents a private contribution a user made on GitHub.
+"""
+type RestrictedContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+Autogenerated input type of RetireSponsorsTier
+"""
+input RetireSponsorsTierInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the published tier to retire.
+  """
+  tierId: ID! @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of RetireSponsorsTier
+"""
+type RetireSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was retired.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+Autogenerated input type of RevertPullRequest
+"""
+input RevertPullRequestInput {
+  """
+  The description of the revert pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Indicates whether the revert pull request should be a draft.
+  """
+  draft: Boolean = false
+
+  """
+  The ID of the pull request to revert.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The title of the revert pull request.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of RevertPullRequest
+"""
+type RevertPullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was reverted.
+  """
+  pullRequest: PullRequest
+
+  """
+  The new pull request that reverts the input pull request.
+  """
+  revertPullRequest: PullRequest
+}
+
+"""
+A user, team, or app who has the ability to dismiss a review on a protected branch.
+"""
+type ReviewDismissalAllowance implements Node {
+  """
+  The actor that can dismiss.
+  """
+  actor: ReviewDismissalAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the ReviewDismissalAllowance object
+  """
+  id: ID!
+}
+
+"""
+Types that can be an actor.
+"""
+union ReviewDismissalAllowanceActor = App | Team | User
+
+"""
+The connection type for ReviewDismissalAllowance.
+"""
+type ReviewDismissalAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReviewDismissalAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReviewDismissalAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReviewDismissalAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReviewDismissalAllowance
+}
+
+"""
+Represents a 'review_dismissed' event on a given issue or pull request.
+"""
+type ReviewDismissedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Identifies the optional message associated with the 'review_dismissed' event.
+  """
+  dismissalMessage: String
+
+  """
+  Identifies the optional message associated with the event, rendered to HTML.
+  """
+  dismissalMessageHTML: String
+
+  """
+  The Node ID of the ReviewDismissedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the previous state of the review with the 'review_dismissed' event.
+  """
+  previousReviewState: PullRequestReviewState!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the commit which caused the review to become stale.
+  """
+  pullRequestCommit: PullRequestCommit
+
+  """
+  The HTTP path for this review dismissed event.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the review associated with the 'review_dismissed' event.
+  """
+  review: PullRequestReview
+
+  """
+  The HTTP URL for this review dismissed event.
+  """
+  url: URI!
+}
+
+"""
+A request for a user to review a pull request.
+"""
+type ReviewRequest implements Node {
+  """
+  Whether this request was created for a code owner
+  """
+  asCodeOwner: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ReviewRequest object
+  """
+  id: ID!
+
+  """
+  Identifies the pull request associated with this review request.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The reviewer that is requested.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+The connection type for ReviewRequest.
+"""
+type ReviewRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReviewRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReviewRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReviewRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReviewRequest
+}
+
+"""
+Represents an 'review_request_removed' event on a given pull request.
+"""
+type ReviewRequestRemovedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReviewRequestRemovedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the reviewer whose review request was removed.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+Represents an 'review_requested' event on a given pull request.
+"""
+type ReviewRequestedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReviewRequestedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the reviewer whose review was requested.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+A hovercard context with a message describing the current code review state of the pull
+request.
+"""
+type ReviewStatusHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  The current status of the pull request with respect to code review.
+  """
+  reviewDecision: PullRequestReviewDecision
+}
+
+"""
+Autogenerated input type of RevokeEnterpriseOrganizationsMigratorRole
+"""
+input RevokeEnterpriseOrganizationsMigratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise to which all organizations managed by it will be granted the migrator role.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user to revoke the migrator role
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RevokeEnterpriseOrganizationsMigratorRole
+"""
+type RevokeEnterpriseOrganizationsMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organizations that had the migrator role revoked for the given user.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationConnection
+}
+
+"""
+Autogenerated input type of RevokeMigratorRole
+"""
+input RevokeMigratorRoleInput {
+  """
+  The user login or Team slug to revoke the migrator role from.
+  """
+  actor: String!
+
+  """
+  Specifies the type of the actor, can be either USER or TEAM.
+  """
+  actorType: ActorType!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that the user/team belongs to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of RevokeMigratorRole
+"""
+type RevokeMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Possible roles a user may have in relation to an organization.
+"""
+enum RoleInOrganization {
+  """
+  A user who is a direct member of the organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  A user with full administrative access to the organization.
+  """
+  OWNER
+
+  """
+  A user who is unaffiliated with the organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The level of enforcement for a rule or ruleset.
+"""
+enum RuleEnforcement {
+  """
+  Rules will be enforced
+  """
+  ACTIVE
+
+  """
+  Do not evaluate or enforce rules
+  """
+  DISABLED
+
+  """
+  Allow admins to test rules before enforcing them. Admins can view insights on
+  the Rule Insights page (`evaluate` is only available with GitHub Enterprise).
+  """
+  EVALUATE
+}
+
+"""
+Types which can be parameters for `RepositoryRule` objects.
+"""
+union RuleParameters =
+    BranchNamePatternParameters
+  | CommitAuthorEmailPatternParameters
+  | CommitMessagePatternParameters
+  | CommitterEmailPatternParameters
+  | PullRequestParameters
+  | RequiredDeploymentsParameters
+  | RequiredStatusChecksParameters
+  | TagNamePatternParameters
+  | UpdateParameters
+  | WorkflowsParameters
+
+"""
+Specifies the parameters for a `RepositoryRule` object. Only one of the fields should be specified.
+"""
+input RuleParametersInput {
+  """
+  Parameters used for the `branch_name_pattern` rule type
+  """
+  branchNamePattern: BranchNamePatternParametersInput
+
+  """
+  Parameters used for the `commit_author_email_pattern` rule type
+  """
+  commitAuthorEmailPattern: CommitAuthorEmailPatternParametersInput
+
+  """
+  Parameters used for the `commit_message_pattern` rule type
+  """
+  commitMessagePattern: CommitMessagePatternParametersInput
+
+  """
+  Parameters used for the `committer_email_pattern` rule type
+  """
+  committerEmailPattern: CommitterEmailPatternParametersInput
+
+  """
+  Parameters used for the `pull_request` rule type
+  """
+  pullRequest: PullRequestParametersInput
+
+  """
+  Parameters used for the `required_deployments` rule type
+  """
+  requiredDeployments: RequiredDeploymentsParametersInput
+
+  """
+  Parameters used for the `required_status_checks` rule type
+  """
+  requiredStatusChecks: RequiredStatusChecksParametersInput
+
+  """
+  Parameters used for the `tag_name_pattern` rule type
+  """
+  tagNamePattern: TagNamePatternParametersInput
+
+  """
+  Parameters used for the `update` rule type
+  """
+  update: UpdateParametersInput
+
+  """
+  Parameters used for the `workflows` rule type
+  """
+  workflows: WorkflowsParametersInput
+}
+
+"""
+Types which can have `RepositoryRule` objects.
+"""
+union RuleSource = Organization | Repository
+
+"""
+The possible digest algorithms used to sign SAML requests for an identity provider.
+"""
+enum SamlDigestAlgorithm {
+  """
+  SHA1
+  """
+  SHA1
+
+  """
+  SHA256
+  """
+  SHA256
+
+  """
+  SHA384
+  """
+  SHA384
+
+  """
+  SHA512
+  """
+  SHA512
+}
+
+"""
+The possible signature algorithms used to sign SAML requests for a Identity Provider.
+"""
+enum SamlSignatureAlgorithm {
+  """
+  RSA-SHA1
+  """
+  RSA_SHA1
+
+  """
+  RSA-SHA256
+  """
+  RSA_SHA256
+
+  """
+  RSA-SHA384
+  """
+  RSA_SHA384
+
+  """
+  RSA-SHA512
+  """
+  RSA_SHA512
+}
+
+"""
+A Saved Reply is text a user can use to reply quickly.
+"""
+type SavedReply implements Node {
+  """
+  The body of the saved reply.
+  """
+  body: String!
+
+  """
+  The saved reply body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the SavedReply object
+  """
+  id: ID!
+
+  """
+  The title of the saved reply.
+  """
+  title: String!
+
+  """
+  The user that saved this reply.
+  """
+  user: Actor
+}
+
+"""
+The connection type for SavedReply.
+"""
+type SavedReplyConnection {
+  """
+  A list of edges.
+  """
+  edges: [SavedReplyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SavedReply]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SavedReplyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SavedReply
+}
+
+"""
+Ordering options for saved reply connections.
+"""
+input SavedReplyOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order saved replies by.
+  """
+  field: SavedReplyOrderField!
+}
+
+"""
+Properties by which saved reply connections can be ordered.
+"""
+enum SavedReplyOrderField {
+  """
+  Order saved reply by when they were updated.
+  """
+  UPDATED_AT
+}
+
+"""
+The results of a search.
+"""
+union SearchResultItem = App | Discussion | Issue | MarketplaceListing | Organization | PullRequest | Repository | User
+
+"""
+A list of results that matched against a search query. Regardless of the number
+of matches, a maximum of 1,000 results will be available across all types,
+potentially split across many pages.
+"""
+type SearchResultItemConnection {
+  """
+  The total number of pieces of code that matched the search query. Regardless
+  of the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  codeCount: Int!
+
+  """
+  The total number of discussions that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  discussionCount: Int!
+
+  """
+  A list of edges.
+  """
+  edges: [SearchResultItemEdge]
+
+  """
+  The total number of issues that matched the search query. Regardless of the
+  total number of matches, a maximum of 1,000 results will be available across all types.
+  """
+  issueCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [SearchResultItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  The total number of repositories that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  repositoryCount: Int!
+
+  """
+  The total number of users that matched the search query. Regardless of the
+  total number of matches, a maximum of 1,000 results will be available across all types.
+  """
+  userCount: Int!
+
+  """
+  The total number of wiki pages that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  wikiCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SearchResultItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SearchResultItem
+
+  """
+  Text matches on the result found.
+  """
+  textMatches: [TextMatch]
+}
+
+"""
+Represents the individual results of a search.
+"""
+enum SearchType {
+  """
+  Returns matching discussions in repositories.
+  """
+  DISCUSSION
+
+  """
+  Returns results matching issues in repositories.
+  """
+  ISSUE
+
+  """
+  Returns results matching repositories.
+  """
+  REPOSITORY
+
+  """
+  Returns results matching users and organizations on GitHub.
+  """
+  USER
+}
+
+"""
+A GitHub Security Advisory
+"""
+type SecurityAdvisory implements Node {
+  """
+  The classification of the advisory
+  """
+  classification: SecurityAdvisoryClassification!
+
+  """
+  The CVSS associated with this advisory
+  """
+  cvss: CVSS!
+
+  """
+  CWEs associated with this Advisory
+  """
+  cwes(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CWEConnection!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  This is a long plaintext description of the advisory
+  """
+  description: String!
+
+  """
+  The GitHub Security Advisory ID
+  """
+  ghsaId: String!
+
+  """
+  The Node ID of the SecurityAdvisory object
+  """
+  id: ID!
+
+  """
+  A list of identifiers for this advisory
+  """
+  identifiers: [SecurityAdvisoryIdentifier!]!
+
+  """
+  The permalink for the advisory's dependabot alerts page
+  """
+  notificationsPermalink: URI
+
+  """
+  The organization that originated the advisory
+  """
+  origin: String!
+
+  """
+  The permalink for the advisory
+  """
+  permalink: URI
+
+  """
+  When the advisory was published
+  """
+  publishedAt: DateTime!
+
+  """
+  A list of references for this advisory
+  """
+  references: [SecurityAdvisoryReference!]!
+
+  """
+  The severity of the advisory
+  """
+  severity: SecurityAdvisorySeverity!
+
+  """
+  A short plaintext summary of the advisory
+  """
+  summary: String!
+
+  """
+  When the advisory was last updated
+  """
+  updatedAt: DateTime!
+
+  """
+  Vulnerabilities associated with this Advisory
+  """
+  vulnerabilities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of advisory classifications to filter vulnerabilities by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    An ecosystem to filter vulnerabilities by.
+    """
+    ecosystem: SecurityAdvisoryEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned topics.
+    """
+    orderBy: SecurityVulnerabilityOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A package name to filter vulnerabilities by.
+    """
+    package: String
+
+    """
+    A list of severities to filter vulnerabilities by.
+    """
+    severities: [SecurityAdvisorySeverity!]
+  ): SecurityVulnerabilityConnection!
+
+  """
+  When the advisory was withdrawn, if it has been withdrawn
+  """
+  withdrawnAt: DateTime
+}
+
+"""
+Classification of the advisory.
+"""
+enum SecurityAdvisoryClassification {
+  """
+  Classification of general advisories.
+  """
+  GENERAL
+
+  """
+  Classification of malware advisories.
+  """
+  MALWARE
+}
+
+"""
+The connection type for SecurityAdvisory.
+"""
+type SecurityAdvisoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [SecurityAdvisoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SecurityAdvisory]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible ecosystems of a security vulnerability's package.
+"""
+enum SecurityAdvisoryEcosystem {
+  """
+  GitHub Actions
+  """
+  ACTIONS
+
+  """
+  PHP packages hosted at packagist.org
+  """
+  COMPOSER
+
+  """
+  Erlang/Elixir packages hosted at hex.pm
+  """
+  ERLANG
+
+  """
+  Go modules
+  """
+  GO
+
+  """
+  Java artifacts hosted at the Maven central repository
+  """
+  MAVEN
+
+  """
+  JavaScript packages hosted at npmjs.com
+  """
+  NPM
+
+  """
+  .NET packages hosted at the NuGet Gallery
+  """
+  NUGET
+
+  """
+  Python packages hosted at PyPI.org
+  """
+  PIP
+
+  """
+  Dart packages hosted at pub.dev
+  """
+  PUB
+
+  """
+  Ruby gems hosted at RubyGems.org
+  """
+  RUBYGEMS
+
+  """
+  Rust crates
+  """
+  RUST
+
+  """
+  Swift packages
+  """
+  SWIFT
+}
+
+"""
+An edge in a connection.
+"""
+type SecurityAdvisoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SecurityAdvisory
+}
+
+"""
+A GitHub Security Advisory Identifier
+"""
+type SecurityAdvisoryIdentifier {
+  """
+  The identifier type, e.g. GHSA, CVE
+  """
+  type: String!
+
+  """
+  The identifier
+  """
+  value: String!
+}
+
+"""
+An advisory identifier to filter results on.
+"""
+input SecurityAdvisoryIdentifierFilter {
+  """
+  The identifier type.
+  """
+  type: SecurityAdvisoryIdentifierType!
+
+  """
+  The identifier string. Supports exact or partial matching.
+  """
+  value: String!
+}
+
+"""
+Identifier formats available for advisories.
+"""
+enum SecurityAdvisoryIdentifierType {
+  """
+  Common Vulnerabilities and Exposures Identifier.
+  """
+  CVE
+
+  """
+  GitHub Security Advisory ID.
+  """
+  GHSA
+}
+
+"""
+Ordering options for security advisory connections
+"""
+input SecurityAdvisoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order security advisories by.
+  """
+  field: SecurityAdvisoryOrderField!
+}
+
+"""
+Properties by which security advisory connections can be ordered.
+"""
+enum SecurityAdvisoryOrderField {
+  """
+  Order advisories by publication time
+  """
+  PUBLISHED_AT
+
+  """
+  Order advisories by update time
+  """
+  UPDATED_AT
+}
+
+"""
+An individual package
+"""
+type SecurityAdvisoryPackage {
+  """
+  The ecosystem the package belongs to, e.g. RUBYGEMS, NPM
+  """
+  ecosystem: SecurityAdvisoryEcosystem!
+
+  """
+  The package name
+  """
+  name: String!
+}
+
+"""
+An individual package version
+"""
+type SecurityAdvisoryPackageVersion {
+  """
+  The package name or version
+  """
+  identifier: String!
+}
+
+"""
+A GitHub Security Advisory Reference
+"""
+type SecurityAdvisoryReference {
+  """
+  A publicly accessible reference
+  """
+  url: URI!
+}
+
+"""
+Severity of the vulnerability.
+"""
+enum SecurityAdvisorySeverity {
+  """
+  Critical.
+  """
+  CRITICAL
+
+  """
+  High.
+  """
+  HIGH
+
+  """
+  Low.
+  """
+  LOW
+
+  """
+  Moderate.
+  """
+  MODERATE
+}
+
+"""
+An individual vulnerability within an Advisory
+"""
+type SecurityVulnerability {
+  """
+  The Advisory associated with this Vulnerability
+  """
+  advisory: SecurityAdvisory!
+
+  """
+  The first version containing a fix for the vulnerability
+  """
+  firstPatchedVersion: SecurityAdvisoryPackageVersion
+
+  """
+  A description of the vulnerable package
+  """
+  package: SecurityAdvisoryPackage!
+
+  """
+  The severity of the vulnerability within this package
+  """
+  severity: SecurityAdvisorySeverity!
+
+  """
+  When the vulnerability was last updated
+  """
+  updatedAt: DateTime!
+
+  """
+  A string that describes the vulnerable package versions.
+  This string follows a basic syntax with a few forms.
+  + `= 0.2.0` denotes a single vulnerable version.
+  + `<= 1.0.8` denotes a version range up to and including the specified version
+  + `< 0.1.11` denotes a version range up to, but excluding, the specified version
+  + `>= 4.3.0, < 4.3.5` denotes a version range with a known minimum and maximum version.
+  + `>= 0.0.1` denotes a version range with a known minimum, but no known maximum
+  """
+  vulnerableVersionRange: String!
+}
+
+"""
+The connection type for SecurityVulnerability.
+"""
+type SecurityVulnerabilityConnection {
+  """
+  A list of edges.
+  """
+  edges: [SecurityVulnerabilityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SecurityVulnerability]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SecurityVulnerabilityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SecurityVulnerability
+}
+
+"""
+Ordering options for security vulnerability connections
+"""
+input SecurityVulnerabilityOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order security vulnerabilities by.
+  """
+  field: SecurityVulnerabilityOrderField!
+}
+
+"""
+Properties by which security vulnerability connections can be ordered.
+"""
+enum SecurityVulnerabilityOrderField {
+  """
+  Order vulnerability by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Autogenerated input type of SetEnterpriseIdentityProvider
+"""
+input SetEnterpriseIdentityProviderInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The digest algorithm used to sign SAML requests for the identity provider.
+  """
+  digestMethod: SamlDigestAlgorithm!
+
+  """
+  The ID of the enterprise on which to set an identity provider.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The x509 certificate used by the identity provider to sign assertions and responses.
+  """
+  idpCertificate: String!
+
+  """
+  The Issuer Entity ID for the SAML identity provider
+  """
+  issuer: String
+
+  """
+  The signature algorithm used to sign SAML requests for the identity provider.
+  """
+  signatureMethod: SamlSignatureAlgorithm!
+
+  """
+  The URL endpoint for the identity provider's SAML SSO.
+  """
+  ssoUrl: URI!
+}
+
+"""
+Autogenerated return type of SetEnterpriseIdentityProvider
+"""
+type SetEnterpriseIdentityProviderPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider for the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of SetOrganizationInteractionLimit
+"""
+input SetOrganizationInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the organization to set a limit for.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of SetOrganizationInteractionLimit
+"""
+type SetOrganizationInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that the interaction limit was set for.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of SetRepositoryInteractionLimit
+"""
+input SetRepositoryInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the repository to set a limit for.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of SetRepositoryInteractionLimit
+"""
+type SetRepositoryInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that the interaction limit was set for.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of SetUserInteractionLimit
+"""
+input SetUserInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the user to set a limit for.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of SetUserInteractionLimit
+"""
+type SetUserInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that the interaction limit was set for.
+  """
+  user: User
+}
+
+"""
+Represents an S/MIME signature on a Commit or Tag.
+"""
+type SmimeSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if signature is valid and verified by
+  GitHub, otherwise represents reason why signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Social media profile associated with a user.
+"""
+type SocialAccount {
+  """
+  Name of the social media account as it appears on the profile.
+  """
+  displayName: String!
+
+  """
+  Software or company that hosts the social media account.
+  """
+  provider: SocialAccountProvider!
+
+  """
+  URL of the social media account.
+  """
+  url: URI!
+}
+
+"""
+The connection type for SocialAccount.
+"""
+type SocialAccountConnection {
+  """
+  A list of edges.
+  """
+  edges: [SocialAccountEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SocialAccount]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SocialAccountEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SocialAccount
+}
+
+"""
+Software or company that hosts social media accounts.
+"""
+enum SocialAccountProvider {
+  """
+  Social media and networking website.
+  """
+  FACEBOOK
+
+  """
+  Catch-all for social media providers that do not yet have specific handling.
+  """
+  GENERIC
+
+  """
+  Fork of Mastodon with a greater focus on local posting.
+  """
+  HOMETOWN
+
+  """
+  Social media website with a focus on photo and video sharing.
+  """
+  INSTAGRAM
+
+  """
+  Professional networking website.
+  """
+  LINKEDIN
+
+  """
+  Open-source federated microblogging service.
+  """
+  MASTODON
+
+  """
+  JavaScript package registry.
+  """
+  NPM
+
+  """
+  Social news aggregation and discussion website.
+  """
+  REDDIT
+
+  """
+  Live-streaming service.
+  """
+  TWITCH
+
+  """
+  Microblogging website.
+  """
+  TWITTER
+
+  """
+  Online video platform.
+  """
+  YOUTUBE
+}
+
+"""
+Entities that can sponsor others via GitHub Sponsors
+"""
+union Sponsor = Organization | User
+
+"""
+The connection type for Sponsor.
+"""
+type SponsorConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Sponsor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user or organization who is sponsoring someone in GitHub Sponsors.
+"""
+type SponsorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Sponsor
+}
+
+"""
+Ordering options for connections to get sponsor entities for GitHub Sponsors.
+"""
+input SponsorOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsor entities by.
+  """
+  field: SponsorOrderField!
+}
+
+"""
+Properties by which sponsor connections can be ordered.
+"""
+enum SponsorOrderField {
+  """
+  Order sponsorable entities by login (username).
+  """
+  LOGIN
+
+  """
+  Order sponsors by their relevance to the viewer.
+  """
+  RELEVANCE
+}
+
+"""
+Entities that can sponsor or be sponsored through GitHub Sponsors.
+"""
+interface Sponsorable {
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+}
+
+"""
+Entities that can be sponsored via GitHub Sponsors
+"""
+union SponsorableItem = Organization | User
+
+"""
+The connection type for SponsorableItem.
+"""
+type SponsorableItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorableItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorableItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorableItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorableItem
+}
+
+"""
+Ordering options for connections to get sponsorable entities for GitHub Sponsors.
+"""
+input SponsorableOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorable entities by.
+  """
+  field: SponsorableOrderField!
+}
+
+"""
+Properties by which sponsorable connections can be ordered.
+"""
+enum SponsorableOrderField {
+  """
+  Order sponsorable entities by login (username).
+  """
+  LOGIN
+}
+
+"""
+An event related to sponsorship activity.
+"""
+type SponsorsActivity implements Node {
+  """
+  What action this activity indicates took place.
+  """
+  action: SponsorsActivityAction!
+
+  """
+  The sponsor's current privacy level.
+  """
+  currentPrivacyLevel: SponsorshipPrivacy
+
+  """
+  The Node ID of the SponsorsActivity object
+  """
+  id: ID!
+
+  """
+  The platform that was used to pay for the sponsorship.
+  """
+  paymentSource: SponsorshipPaymentSource
+
+  """
+  The tier that the sponsorship used to use, for tier change events.
+  """
+  previousSponsorsTier: SponsorsTier
+
+  """
+  The user or organization who triggered this activity and was/is sponsoring the sponsorable.
+  """
+  sponsor: Sponsor
+
+  """
+  The user or organization that is being sponsored, the maintainer.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The associated sponsorship tier.
+  """
+  sponsorsTier: SponsorsTier
+
+  """
+  The timestamp of this event.
+  """
+  timestamp: DateTime
+
+  """
+  Was this sponsorship made alongside other sponsorships at the same time from the same sponsor?
+  """
+  viaBulkSponsorship: Boolean!
+}
+
+"""
+The possible actions that GitHub Sponsors activities can represent.
+"""
+enum SponsorsActivityAction {
+  """
+  The activity was cancelling a sponsorship.
+  """
+  CANCELLED_SPONSORSHIP
+
+  """
+  The activity was starting a sponsorship.
+  """
+  NEW_SPONSORSHIP
+
+  """
+  The activity was scheduling a downgrade or cancellation.
+  """
+  PENDING_CHANGE
+
+  """
+  The activity was funds being refunded to the sponsor or GitHub.
+  """
+  REFUND
+
+  """
+  The activity was disabling matching for a previously matched sponsorship.
+  """
+  SPONSOR_MATCH_DISABLED
+
+  """
+  The activity was changing the sponsorship tier, either directly by the sponsor or by a scheduled/pending change.
+  """
+  TIER_CHANGE
+}
+
+"""
+The connection type for SponsorsActivity.
+"""
+type SponsorsActivityConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorsActivityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorsActivity]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorsActivityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorsActivity
+}
+
+"""
+Ordering options for GitHub Sponsors activity connections.
+"""
+input SponsorsActivityOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order activity by.
+  """
+  field: SponsorsActivityOrderField!
+}
+
+"""
+Properties by which GitHub Sponsors activity connections can be ordered.
+"""
+enum SponsorsActivityOrderField {
+  """
+  Order activities by when they happened.
+  """
+  TIMESTAMP
+}
+
+"""
+The possible time periods for which Sponsors activities can be requested.
+"""
+enum SponsorsActivityPeriod {
+  """
+  Don't restrict the activity to any date range, include all activity.
+  """
+  ALL
+
+  """
+  The previous calendar day.
+  """
+  DAY
+
+  """
+  The previous thirty days.
+  """
+  MONTH
+
+  """
+  The previous seven days.
+  """
+  WEEK
+}
+
+"""
+Represents countries or regions for billing and residence for a GitHub Sponsors profile.
+"""
+enum SponsorsCountryOrRegionCode {
+  """
+  Andorra
+  """
+  AD
+
+  """
+  United Arab Emirates
+  """
+  AE
+
+  """
+  Afghanistan
+  """
+  AF
+
+  """
+  Antigua and Barbuda
+  """
+  AG
+
+  """
+  Anguilla
+  """
+  AI
+
+  """
+  Albania
+  """
+  AL
+
+  """
+  Armenia
+  """
+  AM
+
+  """
+  Angola
+  """
+  AO
+
+  """
+  Antarctica
+  """
+  AQ
+
+  """
+  Argentina
+  """
+  AR
+
+  """
+  American Samoa
+  """
+  AS
+
+  """
+  Austria
+  """
+  AT
+
+  """
+  Australia
+  """
+  AU
+
+  """
+  Aruba
+  """
+  AW
+
+  """
+  Åland
+  """
+  AX
+
+  """
+  Azerbaijan
+  """
+  AZ
+
+  """
+  Bosnia and Herzegovina
+  """
+  BA
+
+  """
+  Barbados
+  """
+  BB
+
+  """
+  Bangladesh
+  """
+  BD
+
+  """
+  Belgium
+  """
+  BE
+
+  """
+  Burkina Faso
+  """
+  BF
+
+  """
+  Bulgaria
+  """
+  BG
+
+  """
+  Bahrain
+  """
+  BH
+
+  """
+  Burundi
+  """
+  BI
+
+  """
+  Benin
+  """
+  BJ
+
+  """
+  Saint Barthélemy
+  """
+  BL
+
+  """
+  Bermuda
+  """
+  BM
+
+  """
+  Brunei Darussalam
+  """
+  BN
+
+  """
+  Bolivia
+  """
+  BO
+
+  """
+  Bonaire, Sint Eustatius and Saba
+  """
+  BQ
+
+  """
+  Brazil
+  """
+  BR
+
+  """
+  Bahamas
+  """
+  BS
+
+  """
+  Bhutan
+  """
+  BT
+
+  """
+  Bouvet Island
+  """
+  BV
+
+  """
+  Botswana
+  """
+  BW
+
+  """
+  Belarus
+  """
+  BY
+
+  """
+  Belize
+  """
+  BZ
+
+  """
+  Canada
+  """
+  CA
+
+  """
+  Cocos (Keeling) Islands
+  """
+  CC
+
+  """
+  Congo (Kinshasa)
+  """
+  CD
+
+  """
+  Central African Republic
+  """
+  CF
+
+  """
+  Congo (Brazzaville)
+  """
+  CG
+
+  """
+  Switzerland
+  """
+  CH
+
+  """
+  Côte d'Ivoire
+  """
+  CI
+
+  """
+  Cook Islands
+  """
+  CK
+
+  """
+  Chile
+  """
+  CL
+
+  """
+  Cameroon
+  """
+  CM
+
+  """
+  China
+  """
+  CN
+
+  """
+  Colombia
+  """
+  CO
+
+  """
+  Costa Rica
+  """
+  CR
+
+  """
+  Cape Verde
+  """
+  CV
+
+  """
+  Curaçao
+  """
+  CW
+
+  """
+  Christmas Island
+  """
+  CX
+
+  """
+  Cyprus
+  """
+  CY
+
+  """
+  Czech Republic
+  """
+  CZ
+
+  """
+  Germany
+  """
+  DE
+
+  """
+  Djibouti
+  """
+  DJ
+
+  """
+  Denmark
+  """
+  DK
+
+  """
+  Dominica
+  """
+  DM
+
+  """
+  Dominican Republic
+  """
+  DO
+
+  """
+  Algeria
+  """
+  DZ
+
+  """
+  Ecuador
+  """
+  EC
+
+  """
+  Estonia
+  """
+  EE
+
+  """
+  Egypt
+  """
+  EG
+
+  """
+  Western Sahara
+  """
+  EH
+
+  """
+  Eritrea
+  """
+  ER
+
+  """
+  Spain
+  """
+  ES
+
+  """
+  Ethiopia
+  """
+  ET
+
+  """
+  Finland
+  """
+  FI
+
+  """
+  Fiji
+  """
+  FJ
+
+  """
+  Falkland Islands
+  """
+  FK
+
+  """
+  Micronesia
+  """
+  FM
+
+  """
+  Faroe Islands
+  """
+  FO
+
+  """
+  France
+  """
+  FR
+
+  """
+  Gabon
+  """
+  GA
+
+  """
+  United Kingdom
+  """
+  GB
+
+  """
+  Grenada
+  """
+  GD
+
+  """
+  Georgia
+  """
+  GE
+
+  """
+  French Guiana
+  """
+  GF
+
+  """
+  Guernsey
+  """
+  GG
+
+  """
+  Ghana
+  """
+  GH
+
+  """
+  Gibraltar
+  """
+  GI
+
+  """
+  Greenland
+  """
+  GL
+
+  """
+  Gambia
+  """
+  GM
+
+  """
+  Guinea
+  """
+  GN
+
+  """
+  Guadeloupe
+  """
+  GP
+
+  """
+  Equatorial Guinea
+  """
+  GQ
+
+  """
+  Greece
+  """
+  GR
+
+  """
+  South Georgia and South Sandwich Islands
+  """
+  GS
+
+  """
+  Guatemala
+  """
+  GT
+
+  """
+  Guam
+  """
+  GU
+
+  """
+  Guinea-Bissau
+  """
+  GW
+
+  """
+  Guyana
+  """
+  GY
+
+  """
+  Hong Kong
+  """
+  HK
+
+  """
+  Heard and McDonald Islands
+  """
+  HM
+
+  """
+  Honduras
+  """
+  HN
+
+  """
+  Croatia
+  """
+  HR
+
+  """
+  Haiti
+  """
+  HT
+
+  """
+  Hungary
+  """
+  HU
+
+  """
+  Indonesia
+  """
+  ID
+
+  """
+  Ireland
+  """
+  IE
+
+  """
+  Israel
+  """
+  IL
+
+  """
+  Isle of Man
+  """
+  IM
+
+  """
+  India
+  """
+  IN
+
+  """
+  British Indian Ocean Territory
+  """
+  IO
+
+  """
+  Iraq
+  """
+  IQ
+
+  """
+  Iran
+  """
+  IR
+
+  """
+  Iceland
+  """
+  IS
+
+  """
+  Italy
+  """
+  IT
+
+  """
+  Jersey
+  """
+  JE
+
+  """
+  Jamaica
+  """
+  JM
+
+  """
+  Jordan
+  """
+  JO
+
+  """
+  Japan
+  """
+  JP
+
+  """
+  Kenya
+  """
+  KE
+
+  """
+  Kyrgyzstan
+  """
+  KG
+
+  """
+  Cambodia
+  """
+  KH
+
+  """
+  Kiribati
+  """
+  KI
+
+  """
+  Comoros
+  """
+  KM
+
+  """
+  Saint Kitts and Nevis
+  """
+  KN
+
+  """
+  Korea, South
+  """
+  KR
+
+  """
+  Kuwait
+  """
+  KW
+
+  """
+  Cayman Islands
+  """
+  KY
+
+  """
+  Kazakhstan
+  """
+  KZ
+
+  """
+  Laos
+  """
+  LA
+
+  """
+  Lebanon
+  """
+  LB
+
+  """
+  Saint Lucia
+  """
+  LC
+
+  """
+  Liechtenstein
+  """
+  LI
+
+  """
+  Sri Lanka
+  """
+  LK
+
+  """
+  Liberia
+  """
+  LR
+
+  """
+  Lesotho
+  """
+  LS
+
+  """
+  Lithuania
+  """
+  LT
+
+  """
+  Luxembourg
+  """
+  LU
+
+  """
+  Latvia
+  """
+  LV
+
+  """
+  Libya
+  """
+  LY
+
+  """
+  Morocco
+  """
+  MA
+
+  """
+  Monaco
+  """
+  MC
+
+  """
+  Moldova
+  """
+  MD
+
+  """
+  Montenegro
+  """
+  ME
+
+  """
+  Saint Martin (French part)
+  """
+  MF
+
+  """
+  Madagascar
+  """
+  MG
+
+  """
+  Marshall Islands
+  """
+  MH
+
+  """
+  Macedonia
+  """
+  MK
+
+  """
+  Mali
+  """
+  ML
+
+  """
+  Myanmar
+  """
+  MM
+
+  """
+  Mongolia
+  """
+  MN
+
+  """
+  Macau
+  """
+  MO
+
+  """
+  Northern Mariana Islands
+  """
+  MP
+
+  """
+  Martinique
+  """
+  MQ
+
+  """
+  Mauritania
+  """
+  MR
+
+  """
+  Montserrat
+  """
+  MS
+
+  """
+  Malta
+  """
+  MT
+
+  """
+  Mauritius
+  """
+  MU
+
+  """
+  Maldives
+  """
+  MV
+
+  """
+  Malawi
+  """
+  MW
+
+  """
+  Mexico
+  """
+  MX
+
+  """
+  Malaysia
+  """
+  MY
+
+  """
+  Mozambique
+  """
+  MZ
+
+  """
+  Namibia
+  """
+  NA
+
+  """
+  New Caledonia
+  """
+  NC
+
+  """
+  Niger
+  """
+  NE
+
+  """
+  Norfolk Island
+  """
+  NF
+
+  """
+  Nigeria
+  """
+  NG
+
+  """
+  Nicaragua
+  """
+  NI
+
+  """
+  Netherlands
+  """
+  NL
+
+  """
+  Norway
+  """
+  NO
+
+  """
+  Nepal
+  """
+  NP
+
+  """
+  Nauru
+  """
+  NR
+
+  """
+  Niue
+  """
+  NU
+
+  """
+  New Zealand
+  """
+  NZ
+
+  """
+  Oman
+  """
+  OM
+
+  """
+  Panama
+  """
+  PA
+
+  """
+  Peru
+  """
+  PE
+
+  """
+  French Polynesia
+  """
+  PF
+
+  """
+  Papua New Guinea
+  """
+  PG
+
+  """
+  Philippines
+  """
+  PH
+
+  """
+  Pakistan
+  """
+  PK
+
+  """
+  Poland
+  """
+  PL
+
+  """
+  Saint Pierre and Miquelon
+  """
+  PM
+
+  """
+  Pitcairn
+  """
+  PN
+
+  """
+  Puerto Rico
+  """
+  PR
+
+  """
+  Palestine
+  """
+  PS
+
+  """
+  Portugal
+  """
+  PT
+
+  """
+  Palau
+  """
+  PW
+
+  """
+  Paraguay
+  """
+  PY
+
+  """
+  Qatar
+  """
+  QA
+
+  """
+  Reunion
+  """
+  RE
+
+  """
+  Romania
+  """
+  RO
+
+  """
+  Serbia
+  """
+  RS
+
+  """
+  Russian Federation
+  """
+  RU
+
+  """
+  Rwanda
+  """
+  RW
+
+  """
+  Saudi Arabia
+  """
+  SA
+
+  """
+  Solomon Islands
+  """
+  SB
+
+  """
+  Seychelles
+  """
+  SC
+
+  """
+  Sudan
+  """
+  SD
+
+  """
+  Sweden
+  """
+  SE
+
+  """
+  Singapore
+  """
+  SG
+
+  """
+  Saint Helena
+  """
+  SH
+
+  """
+  Slovenia
+  """
+  SI
+
+  """
+  Svalbard and Jan Mayen Islands
+  """
+  SJ
+
+  """
+  Slovakia
+  """
+  SK
+
+  """
+  Sierra Leone
+  """
+  SL
+
+  """
+  San Marino
+  """
+  SM
+
+  """
+  Senegal
+  """
+  SN
+
+  """
+  Somalia
+  """
+  SO
+
+  """
+  Suriname
+  """
+  SR
+
+  """
+  South Sudan
+  """
+  SS
+
+  """
+  Sao Tome and Principe
+  """
+  ST
+
+  """
+  El Salvador
+  """
+  SV
+
+  """
+  Sint Maarten (Dutch part)
+  """
+  SX
+
+  """
+  Swaziland
+  """
+  SZ
+
+  """
+  Turks and Caicos Islands
+  """
+  TC
+
+  """
+  Chad
+  """
+  TD
+
+  """
+  French Southern Lands
+  """
+  TF
+
+  """
+  Togo
+  """
+  TG
+
+  """
+  Thailand
+  """
+  TH
+
+  """
+  Tajikistan
+  """
+  TJ
+
+  """
+  Tokelau
+  """
+  TK
+
+  """
+  Timor-Leste
+  """
+  TL
+
+  """
+  Turkmenistan
+  """
+  TM
+
+  """
+  Tunisia
+  """
+  TN
+
+  """
+  Tonga
+  """
+  TO
+
+  """
+  Türkiye
+  """
+  TR
+
+  """
+  Trinidad and Tobago
+  """
+  TT
+
+  """
+  Tuvalu
+  """
+  TV
+
+  """
+  Taiwan
+  """
+  TW
+
+  """
+  Tanzania
+  """
+  TZ
+
+  """
+  Ukraine
+  """
+  UA
+
+  """
+  Uganda
+  """
+  UG
+
+  """
+  United States Minor Outlying Islands
+  """
+  UM
+
+  """
+  United States of America
+  """
+  US
+
+  """
+  Uruguay
+  """
+  UY
+
+  """
+  Uzbekistan
+  """
+  UZ
+
+  """
+  Vatican City
+  """
+  VA
+
+  """
+  Saint Vincent and the Grenadines
+  """
+  VC
+
+  """
+  Venezuela
+  """
+  VE
+
+  """
+  Virgin Islands, British
+  """
+  VG
+
+  """
+  Virgin Islands, U.S.
+  """
+  VI
+
+  """
+  Vietnam
+  """
+  VN
+
+  """
+  Vanuatu
+  """
+  VU
+
+  """
+  Wallis and Futuna Islands
+  """
+  WF
+
+  """
+  Samoa
+  """
+  WS
+
+  """
+  Yemen
+  """
+  YE
+
+  """
+  Mayotte
+  """
+  YT
+
+  """
+  South Africa
+  """
+  ZA
+
+  """
+  Zambia
+  """
+  ZM
+
+  """
+  Zimbabwe
+  """
+  ZW
+}
+
+"""
+A goal associated with a GitHub Sponsors listing, representing a target the sponsored maintainer would like to attain.
+"""
+type SponsorsGoal {
+  """
+  A description of the goal from the maintainer.
+  """
+  description: String
+
+  """
+  What the objective of this goal is.
+  """
+  kind: SponsorsGoalKind!
+
+  """
+  The percentage representing how complete this goal is, between 0-100.
+  """
+  percentComplete: Int!
+
+  """
+  What the goal amount is. Represents an amount in USD for monthly sponsorship
+  amount goals. Represents a count of unique sponsors for total sponsors count goals.
+  """
+  targetValue: Int!
+
+  """
+  A brief summary of the kind and target value of this goal.
+  """
+  title: String!
+}
+
+"""
+The different kinds of goals a GitHub Sponsors member can have.
+"""
+enum SponsorsGoalKind {
+  """
+  The goal is about getting a certain amount in USD from sponsorships each month.
+  """
+  MONTHLY_SPONSORSHIP_AMOUNT
+
+  """
+  The goal is about reaching a certain number of sponsors.
+  """
+  TOTAL_SPONSORS_COUNT
+}
+
+"""
+A GitHub Sponsors listing.
+"""
+type SponsorsListing implements Node {
+  """
+  The current goal the maintainer is trying to reach with GitHub Sponsors, if any.
+  """
+  activeGoal: SponsorsGoal
+
+  """
+  The Stripe Connect account currently in use for payouts for this Sponsors
+  listing, if any. Will only return a value when queried by the maintainer
+  themselves, or by an admin of the sponsorable organization.
+  """
+  activeStripeConnectAccount: StripeConnectAccount
+
+  """
+  The name of the country or region with the maintainer's bank account or fiscal
+  host. Will only return a value when queried by the maintainer themselves, or
+  by an admin of the sponsorable organization.
+  """
+  billingCountryOrRegion: String
+
+  """
+  The email address used by GitHub to contact the sponsorable about their GitHub
+  Sponsors profile. Will only return a value when queried by the maintainer
+  themselves, or by an admin of the sponsorable organization.
+  """
+  contactEmailAddress: String
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The HTTP path for the Sponsors dashboard for this Sponsors listing.
+  """
+  dashboardResourcePath: URI!
+
+  """
+  The HTTP URL for the Sponsors dashboard for this Sponsors listing.
+  """
+  dashboardUrl: URI!
+
+  """
+  The records featured on the GitHub Sponsors profile.
+  """
+  featuredItems(
+    """
+    The types of featured items to return.
+    """
+    featureableTypes: [SponsorsListingFeaturedItemFeatureableType!] = [REPOSITORY, USER]
+  ): [SponsorsListingFeaturedItem!]!
+
+  """
+  The fiscal host used for payments, if any. Will only return a value when
+  queried by the maintainer themselves, or by an admin of the sponsorable organization.
+  """
+  fiscalHost: Organization
+
+  """
+  The full description of the listing.
+  """
+  fullDescription: String!
+
+  """
+  The full description of the listing rendered to HTML.
+  """
+  fullDescriptionHTML: HTML!
+
+  """
+  The Node ID of the SponsorsListing object
+  """
+  id: ID!
+
+  """
+  Whether this listing is publicly visible.
+  """
+  isPublic: Boolean!
+
+  """
+  The listing's full name.
+  """
+  name: String!
+
+  """
+  A future date on which this listing is eligible to receive a payout.
+  """
+  nextPayoutDate: Date
+
+  """
+  The name of the country or region where the maintainer resides. Will only
+  return a value when queried by the maintainer themselves, or by an admin of
+  the sponsorable organization.
+  """
+  residenceCountryOrRegion: String
+
+  """
+  The HTTP path for this Sponsors listing.
+  """
+  resourcePath: URI!
+
+  """
+  The short description of the listing.
+  """
+  shortDescription: String!
+
+  """
+  The short name of the listing.
+  """
+  slug: String!
+
+  """
+  The entity this listing represents who can be sponsored on GitHub Sponsors.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The tiers for this GitHub Sponsors profile.
+  """
+  tiers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include tiers that aren't published. Only admins of the Sponsors
+    listing can see draft tiers. Only admins of the Sponsors listing and viewers
+    who are currently sponsoring on a retired tier can see those retired tiers.
+    Defaults to including only published tiers, which are visible to anyone who
+    can see the GitHub Sponsors profile.
+    """
+    includeUnpublished: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Sponsors tiers returned from the connection.
+    """
+    orderBy: SponsorsTierOrder = {field: MONTHLY_PRICE_IN_CENTS, direction: ASC}
+  ): SponsorsTierConnection
+
+  """
+  The HTTP URL for this Sponsors listing.
+  """
+  url: URI!
+}
+
+"""
+A record that can be featured on a GitHub Sponsors profile.
+"""
+union SponsorsListingFeatureableItem = Repository | User
+
+"""
+A record that is promoted on a GitHub Sponsors profile.
+"""
+type SponsorsListingFeaturedItem implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Will either be a description from the sponsorable maintainer about why they
+  featured this item, or the item's description itself, such as a user's bio
+  from their GitHub profile page.
+  """
+  description: String
+
+  """
+  The record that is featured on the GitHub Sponsors profile.
+  """
+  featureable: SponsorsListingFeatureableItem!
+
+  """
+  The Node ID of the SponsorsListingFeaturedItem object
+  """
+  id: ID!
+
+  """
+  The position of this featured item on the GitHub Sponsors profile with a lower
+  position indicating higher precedence. Starts at 1.
+  """
+  position: Int!
+
+  """
+  The GitHub Sponsors profile that features this record.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The different kinds of records that can be featured on a GitHub Sponsors profile page.
+"""
+enum SponsorsListingFeaturedItemFeatureableType {
+  """
+  A repository owned by the user or organization with the GitHub Sponsors profile.
+  """
+  REPOSITORY
+
+  """
+  A user who belongs to the organization with the GitHub Sponsors profile.
+  """
+  USER
+}
+
+"""
+A GitHub Sponsors tier associated with a GitHub Sponsors listing.
+"""
+type SponsorsTier implements Node {
+  """
+  SponsorsTier information only visible to users that can administer the associated Sponsors listing.
+  """
+  adminInfo: SponsorsTierAdminInfo
+
+  """
+  Get a different tier for this tier's maintainer that is at the same frequency
+  as this tier but with an equal or lesser cost. Returns the published tier with
+  the monthly price closest to this tier's without going over.
+  """
+  closestLesserValueTier: SponsorsTier
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The description of the tier.
+  """
+  description: String!
+
+  """
+  The tier description rendered to HTML
+  """
+  descriptionHTML: HTML!
+
+  """
+  The Node ID of the SponsorsTier object
+  """
+  id: ID!
+
+  """
+  Whether this tier was chosen at checkout time by the sponsor rather than
+  defined ahead of time by the maintainer who manages the Sponsors listing.
+  """
+  isCustomAmount: Boolean!
+
+  """
+  Whether this tier is only for use with one-time sponsorships.
+  """
+  isOneTime: Boolean!
+
+  """
+  How much this tier costs per month in cents.
+  """
+  monthlyPriceInCents: Int!
+
+  """
+  How much this tier costs per month in USD.
+  """
+  monthlyPriceInDollars: Int!
+
+  """
+  The name of the tier.
+  """
+  name: String!
+
+  """
+  The sponsors listing that this tier belongs to.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+SponsorsTier information only visible to users that can administer the associated Sponsors listing.
+"""
+type SponsorsTierAdminInfo {
+  """
+  Indicates whether this tier is still a work in progress by the sponsorable and
+  not yet published to the associated GitHub Sponsors profile. Draft tiers
+  cannot be used for new sponsorships and will not be in use on existing
+  sponsorships. Draft tiers cannot be seen by anyone but the admins of the
+  GitHub Sponsors profile.
+  """
+  isDraft: Boolean!
+
+  """
+  Indicates whether this tier is published to the associated GitHub Sponsors
+  profile. Published tiers are visible to anyone who can see the GitHub Sponsors
+  profile, and are available for use in sponsorships if the GitHub Sponsors
+  profile is publicly visible.
+  """
+  isPublished: Boolean!
+
+  """
+  Indicates whether this tier has been retired from the associated GitHub
+  Sponsors profile. Retired tiers are no longer shown on the GitHub Sponsors
+  profile and cannot be chosen for new sponsorships. Existing sponsorships may
+  still use retired tiers if the sponsor selected the tier before it was retired.
+  """
+  isRetired: Boolean!
+
+  """
+  The sponsorships using this tier.
+  """
+  sponsorships(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to return private sponsorships using this tier. Defaults to
+    only returning public sponsorships on this tier.
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+}
+
+"""
+The connection type for SponsorsTier.
+"""
+type SponsorsTierConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorsTierEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorsTier]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorsTierEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorsTier
+}
+
+"""
+Ordering options for Sponsors tiers connections.
+"""
+input SponsorsTierOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order tiers by.
+  """
+  field: SponsorsTierOrderField!
+}
+
+"""
+Properties by which Sponsors tiers connections can be ordered.
+"""
+enum SponsorsTierOrderField {
+  """
+  Order tiers by creation time.
+  """
+  CREATED_AT
+
+  """
+  Order tiers by their monthly price in cents
+  """
+  MONTHLY_PRICE_IN_CENTS
+}
+
+"""
+A sponsorship relationship between a sponsor and a maintainer
+"""
+type Sponsorship implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the Sponsorship object
+  """
+  id: ID!
+
+  """
+  Whether the sponsorship is active. False implies the sponsor is a past sponsor
+  of the maintainer, while true implies they are a current sponsor.
+  """
+  isActive: Boolean!
+
+  """
+  Whether this sponsorship represents a one-time payment versus a recurring sponsorship.
+  """
+  isOneTimePayment: Boolean!
+
+  """
+  Whether the sponsor has chosen to receive sponsorship update emails sent from
+  the sponsorable. Only returns a non-null value when the viewer has permission to know this.
+  """
+  isSponsorOptedIntoEmail: Boolean
+
+  """
+  The entity that is being sponsored
+  """
+  maintainer: User!
+    @deprecated(
+      reason: "`Sponsorship.maintainer` will be removed. Use `Sponsorship.sponsorable` instead. Removal on 2020-04-01 UTC."
+    )
+
+  """
+  The platform that was most recently used to pay for the sponsorship.
+  """
+  paymentSource: SponsorshipPaymentSource
+
+  """
+  The privacy level for this sponsorship.
+  """
+  privacyLevel: SponsorshipPrivacy!
+
+  """
+  The user that is sponsoring. Returns null if the sponsorship is private or if sponsor is not a user.
+  """
+  sponsor: User
+    @deprecated(
+      reason: "`Sponsorship.sponsor` will be removed. Use `Sponsorship.sponsorEntity` instead. Removal on 2020-10-01 UTC."
+    )
+
+  """
+  The user or organization that is sponsoring, if you have permission to view them.
+  """
+  sponsorEntity: Sponsor
+
+  """
+  The entity that is being sponsored
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The associated sponsorship tier
+  """
+  tier: SponsorsTier
+
+  """
+  Identifies the date and time when the current tier was chosen for this sponsorship.
+  """
+  tierSelectedAt: DateTime
+}
+
+"""
+The connection type for Sponsorship.
+"""
+type SponsorshipConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorshipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Sponsorship]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total amount in cents of all recurring sponsorships in the connection
+  whose amount you can view. Does not include one-time sponsorships.
+  """
+  totalRecurringMonthlyPriceInCents: Int!
+
+  """
+  The total amount in USD of all recurring sponsorships in the connection whose
+  amount you can view. Does not include one-time sponsorships.
+  """
+  totalRecurringMonthlyPriceInDollars: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorshipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Sponsorship
+}
+
+"""
+An update sent to sponsors of a user or organization on GitHub Sponsors.
+"""
+type SponsorshipNewsletter implements Node {
+  """
+  The author of the newsletter.
+  """
+  author: User
+
+  """
+  The contents of the newsletter, the message the sponsorable wanted to give.
+  """
+  body: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the SponsorshipNewsletter object
+  """
+  id: ID!
+
+  """
+  Indicates if the newsletter has been made available to sponsors.
+  """
+  isPublished: Boolean!
+
+  """
+  The user or organization this newsletter is from.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The subject of the newsletter, what it's about.
+  """
+  subject: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for SponsorshipNewsletter.
+"""
+type SponsorshipNewsletterConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorshipNewsletterEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorshipNewsletter]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorshipNewsletterEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorshipNewsletter
+}
+
+"""
+Ordering options for sponsorship newsletter connections.
+"""
+input SponsorshipNewsletterOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorship newsletters by.
+  """
+  field: SponsorshipNewsletterOrderField!
+}
+
+"""
+Properties by which sponsorship update connections can be ordered.
+"""
+enum SponsorshipNewsletterOrderField {
+  """
+  Order sponsorship newsletters by when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Ordering options for sponsorship connections.
+"""
+input SponsorshipOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorship by.
+  """
+  field: SponsorshipOrderField!
+}
+
+"""
+Properties by which sponsorship connections can be ordered.
+"""
+enum SponsorshipOrderField {
+  """
+  Order sponsorship by creation time.
+  """
+  CREATED_AT
+}
+
+"""
+How payment was made for funding a GitHub Sponsors sponsorship.
+"""
+enum SponsorshipPaymentSource {
+  """
+  Payment was made through GitHub.
+  """
+  GITHUB
+
+  """
+  Payment was made through Patreon.
+  """
+  PATREON
+}
+
+"""
+The privacy of a sponsorship
+"""
+enum SponsorshipPrivacy {
+  """
+  Private
+  """
+  PRIVATE
+
+  """
+  Public
+  """
+  PUBLIC
+}
+
+"""
+The possible default commit messages for squash merges.
+"""
+enum SquashMergeCommitMessage {
+  """
+  Default to a blank commit message.
+  """
+  BLANK
+
+  """
+  Default to the branch's commit messages.
+  """
+  COMMIT_MESSAGES
+
+  """
+  Default to the pull request's body.
+  """
+  PR_BODY
+}
+
+"""
+The possible default commit titles for squash merges.
+"""
+enum SquashMergeCommitTitle {
+  """
+  Default to the commit's title (if only one commit) or the pull request's title (when more than one commit).
+  """
+  COMMIT_OR_PR_TITLE
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+Represents an SSH signature on a Commit or Tag.
+"""
+type SshSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Hex-encoded fingerprint of the key that signed this object.
+  """
+  keyFingerprint: String
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if signature is valid and verified by
+  GitHub, otherwise represents reason why signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Ways in which star connections can be ordered.
+"""
+input StarOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order nodes by.
+  """
+  field: StarOrderField!
+}
+
+"""
+Properties by which star connections can be ordered.
+"""
+enum StarOrderField {
+  """
+  Allows ordering a list of stars by when they were created.
+  """
+  STARRED_AT
+}
+
+"""
+The connection type for User.
+"""
+type StargazerConnection {
+  """
+  A list of edges.
+  """
+  edges: [StargazerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user that's starred a repository.
+"""
+type StargazerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: User!
+
+  """
+  Identifies when the item was starred.
+  """
+  starredAt: DateTime!
+}
+
+"""
+Things that can be starred.
+"""
+interface Starrable {
+  """
+  The Node ID of the Starrable object
+  """
+  id: ID!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
+
+"""
+The connection type for Repository.
+"""
+type StarredRepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [StarredRepositoryEdge]
+
+  """
+  Is the list of stars for this user truncated? This is true for users that have many stars.
+  """
+  isOverLimit: Boolean!
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a starred repository.
+"""
+type StarredRepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: Repository!
+
+  """
+  Identifies when the item was starred.
+  """
+  starredAt: DateTime!
+}
+
+"""
+Autogenerated input type of StartOrganizationMigration
+"""
+input StartOrganizationMigrationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The migration source access token.
+  """
+  sourceAccessToken: String!
+
+  """
+  The URL of the organization to migrate.
+  """
+  sourceOrgUrl: URI!
+
+  """
+  The ID of the enterprise the target organization belongs to.
+  """
+  targetEnterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The name of the target organization.
+  """
+  targetOrgName: String!
+}
+
+"""
+Autogenerated return type of StartOrganizationMigration
+"""
+type StartOrganizationMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new organization migration.
+  """
+  orgMigration: OrganizationMigration
+}
+
+"""
+Autogenerated input type of StartRepositoryMigration
+"""
+input StartRepositoryMigrationInput {
+  """
+  The migration source access token.
+  """
+  accessToken: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether to continue the migration on error. Defaults to `true`.
+  """
+  continueOnError: Boolean
+
+  """
+  The signed URL to access the user-uploaded git archive.
+  """
+  gitArchiveUrl: String
+
+  """
+  The GitHub personal access token of the user importing to the target repository.
+  """
+  githubPat: String
+
+  """
+  Whether to lock the source repository.
+  """
+  lockSource: Boolean
+
+  """
+  The signed URL to access the user-uploaded metadata archive.
+  """
+  metadataArchiveUrl: String
+
+  """
+  The ID of the organization that will own the imported repository.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The name of the imported repository.
+  """
+  repositoryName: String!
+
+  """
+  Whether to skip migrating releases for the repository.
+  """
+  skipReleases: Boolean
+
+  """
+  The ID of the migration source.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["MigrationSource"])
+
+  """
+  The URL of the source repository.
+  """
+  sourceRepositoryUrl: URI
+
+  """
+  The visibility of the imported repository.
+  """
+  targetRepoVisibility: String
+}
+
+"""
+Autogenerated return type of StartRepositoryMigration
+"""
+type StartRepositoryMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository migration.
+  """
+  repositoryMigration: RepositoryMigration
+}
+
+"""
+Represents a commit status.
+"""
+type Status implements Node {
+  """
+  A list of status contexts and check runs for this commit.
+  """
+  combinedContexts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): StatusCheckRollupContextConnection!
+
+  """
+  The commit this status is attached to.
+  """
+  commit: Commit
+
+  """
+  Looks up an individual status context by context name.
+  """
+  context(
+    """
+    The context name.
+    """
+    name: String!
+  ): StatusContext
+
+  """
+  The individual status contexts for this commit.
+  """
+  contexts: [StatusContext!]!
+
+  """
+  The Node ID of the Status object
+  """
+  id: ID!
+
+  """
+  The combined commit status.
+  """
+  state: StatusState!
+}
+
+"""
+Required status check
+"""
+type StatusCheckConfiguration {
+  """
+  The status check context name that must be present on the commit.
+  """
+  context: String!
+
+  """
+  The optional integration ID that this status check must originate from.
+  """
+  integrationId: Int
+}
+
+"""
+Required status check
+"""
+input StatusCheckConfigurationInput {
+  """
+  The status check context name that must be present on the commit.
+  """
+  context: String!
+
+  """
+  The optional integration ID that this status check must originate from.
+  """
+  integrationId: Int
+}
+
+"""
+Represents the rollup for both the check runs and status for a commit.
+"""
+type StatusCheckRollup implements Node {
+  """
+  The commit the status and check runs are attached to.
+  """
+  commit: Commit
+
+  """
+  A list of status contexts and check runs for this commit.
+  """
+  contexts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): StatusCheckRollupContextConnection!
+
+  """
+  The Node ID of the StatusCheckRollup object
+  """
+  id: ID!
+
+  """
+  The combined status for the commit.
+  """
+  state: StatusState!
+}
+
+"""
+Types that can be inside a StatusCheckRollup context.
+"""
+union StatusCheckRollupContext = CheckRun | StatusContext
+
+"""
+The connection type for StatusCheckRollupContext.
+"""
+type StatusCheckRollupContextConnection {
+  """
+  The number of check runs in this rollup.
+  """
+  checkRunCount: Int!
+
+  """
+  Counts of check runs by state.
+  """
+  checkRunCountsByState: [CheckRunStateCount!]
+
+  """
+  A list of edges.
+  """
+  edges: [StatusCheckRollupContextEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [StatusCheckRollupContext]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  The number of status contexts in this rollup.
+  """
+  statusContextCount: Int!
+
+  """
+  Counts of status contexts by state.
+  """
+  statusContextCountsByState: [StatusContextStateCount!]
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type StatusCheckRollupContextEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: StatusCheckRollupContext
+}
+
+"""
+Represents an individual commit status context
+"""
+type StatusContext implements Node & RequirableByPullRequest {
+  """
+  The avatar of the OAuth application or the user that created the status
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int = 40
+  ): URI
+
+  """
+  This commit this status context is attached to.
+  """
+  commit: Commit
+
+  """
+  The name of this status context.
+  """
+  context: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this status context.
+  """
+  creator: Actor
+
+  """
+  The description for this status context.
+  """
+  description: String
+
+  """
+  The Node ID of the StatusContext object
+  """
+  id: ID!
+
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+
+  """
+  The state of this status context.
+  """
+  state: StatusState!
+
+  """
+  The URL for this status context.
+  """
+  targetUrl: URI
+}
+
+"""
+Represents a count of the state of a status context.
+"""
+type StatusContextStateCount {
+  """
+  The number of statuses with this state.
+  """
+  count: Int!
+
+  """
+  The state of a status context.
+  """
+  state: StatusState!
+}
+
+"""
+The possible commit status states.
+"""
+enum StatusState {
+  """
+  Status is errored.
+  """
+  ERROR
+
+  """
+  Status is expected.
+  """
+  EXPECTED
+
+  """
+  Status is failing.
+  """
+  FAILURE
+
+  """
+  Status is pending.
+  """
+  PENDING
+
+  """
+  Status is successful.
+  """
+  SUCCESS
+}
+
+"""
+A Stripe Connect account for receiving sponsorship funds from GitHub Sponsors.
+"""
+type StripeConnectAccount {
+  """
+  The account number used to identify this Stripe Connect account.
+  """
+  accountId: String!
+
+  """
+  The name of the country or region of an external account, such as a bank
+  account, tied to the Stripe Connect account. Will only return a value when
+  queried by the maintainer of the associated GitHub Sponsors profile
+  themselves, or by an admin of the sponsorable organization.
+  """
+  billingCountryOrRegion: String
+
+  """
+  The name of the country or region of the Stripe Connect account. Will only
+  return a value when queried by the maintainer of the associated GitHub
+  Sponsors profile themselves, or by an admin of the sponsorable organization.
+  """
+  countryOrRegion: String
+
+  """
+  Whether this Stripe Connect account is currently in use for the associated GitHub Sponsors profile.
+  """
+  isActive: Boolean!
+
+  """
+  The GitHub Sponsors profile associated with this Stripe Connect account.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  The URL to access this Stripe Connect account on Stripe's website.
+  """
+  stripeDashboardUrl: URI!
+}
+
+"""
+Autogenerated input type of SubmitPullRequestReview
+"""
+input SubmitPullRequestReviewInput {
+  """
+  The text field to set on the Pull Request Review.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The event to send to the Pull Request Review.
+  """
+  event: PullRequestReviewEvent!
+
+  """
+  The Pull Request ID to submit any pending reviews.
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Pull Request Review ID to submit.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of SubmitPullRequestReview
+"""
+type SubmitPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The submitted pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+A pointer to a repository at a specific revision embedded inside another repository.
+"""
+type Submodule {
+  """
+  The branch of the upstream submodule for tracking updates
+  """
+  branch: String
+
+  """
+  The git URL of the submodule repository
+  """
+  gitUrl: URI!
+
+  """
+  The name of the submodule in .gitmodules
+  """
+  name: String!
+
+  """
+  The name of the submodule in .gitmodules (Base64-encoded)
+  """
+  nameRaw: Base64String!
+
+  """
+  The path in the superproject that this submodule is located in
+  """
+  path: String!
+
+  """
+  The path in the superproject that this submodule is located in (Base64-encoded)
+  """
+  pathRaw: Base64String!
+
+  """
+  The commit revision of the subproject repository being tracked by the submodule
+  """
+  subprojectCommitOid: GitObjectID
+}
+
+"""
+The connection type for Submodule.
+"""
+type SubmoduleConnection {
+  """
+  A list of edges.
+  """
+  edges: [SubmoduleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Submodule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SubmoduleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Submodule
+}
+
+"""
+Entities that can be subscribed to for web and email notifications.
+"""
+interface Subscribable {
+  """
+  The Node ID of the Subscribable object
+  """
+  id: ID!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+Entities that can be subscribed to for web and email notifications.
+"""
+interface SubscribableThread {
+  """
+  The Node ID of the SubscribableThread object
+  """
+  id: ID!
+
+  """
+  Identifies the viewer's thread subscription form action.
+  """
+  viewerThreadSubscriptionFormAction: ThreadSubscriptionFormAction
+
+  """
+  Identifies the viewer's thread subscription status.
+  """
+  viewerThreadSubscriptionStatus: ThreadSubscriptionState
+}
+
+"""
+Represents a 'subscribed' event on a given `Subscribable`.
+"""
+type SubscribedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the SubscribedEvent object
+  """
+  id: ID!
+
+  """
+  Object referenced by event.
+  """
+  subscribable: Subscribable!
+}
+
+"""
+The possible states of a subscription.
+"""
+enum SubscriptionState {
+  """
+  The User is never notified.
+  """
+  IGNORED
+
+  """
+  The User is notified of all conversations.
+  """
+  SUBSCRIBED
+
+  """
+  The User is only notified when participating or @mentioned.
+  """
+  UNSUBSCRIBED
+}
+
+"""
+A suggestion to review a pull request based on a user's commit history and review comments.
+"""
+type SuggestedReviewer {
+  """
+  Is this suggestion based on past commits?
+  """
+  isAuthor: Boolean!
+
+  """
+  Is this suggestion based on past review comments?
+  """
+  isCommenter: Boolean!
+
+  """
+  Identifies the user suggested to review the pull request.
+  """
+  reviewer: User!
+}
+
+"""
+Represents a Git tag.
+"""
+type Tag implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the Tag object
+  """
+  id: ID!
+
+  """
+  The Git tag message.
+  """
+  message: String
+
+  """
+  The Git tag name.
+  """
+  name: String!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+
+  """
+  Details about the tag author.
+  """
+  tagger: GitActor
+
+  """
+  The Git object the tag points to.
+  """
+  target: GitObject!
+}
+
+"""
+Parameters to be used for the tag_name_pattern rule
+"""
+type TagNamePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the tag_name_pattern rule
+"""
+input TagNamePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A team of users in an organization.
+"""
+type Team implements MemberStatusable & Node & Subscribable {
+  """
+  A list of teams that are ancestors of this team.
+  """
+  ancestors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  A URL pointing to the team's avatar.
+  """
+  avatarUrl(
+    """
+    The size in pixels of the resulting square image.
+    """
+    size: Int = 400
+  ): URI
+
+  """
+  List of child teams belonging to this team
+  """
+  childTeams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to list immediate child teams or all descendant child teams.
+    """
+    immediateOnly: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamOrder
+
+    """
+    User logins to filter by
+    """
+    userLogins: [String!]
+  ): TeamConnection!
+
+  """
+  The slug corresponding to the organization and team.
+  """
+  combinedSlug: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the team.
+  """
+  description: String
+
+  """
+  Find a team discussion by its number.
+  """
+  discussion(
+    """
+    The sequence number of the discussion to find.
+    """
+    number: Int!
+  ): TeamDiscussion
+
+  """
+  A list of team discussions.
+  """
+  discussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If provided, filters discussions according to whether or not they are pinned.
+    """
+    isPinned: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamDiscussionOrder
+  ): TeamDiscussionConnection!
+
+  """
+  The HTTP path for team discussions
+  """
+  discussionsResourcePath: URI!
+
+  """
+  The HTTP URL for team discussions
+  """
+  discussionsUrl: URI!
+
+  """
+  The HTTP path for editing this team
+  """
+  editTeamResourcePath: URI!
+
+  """
+  The HTTP URL for editing this team
+  """
+  editTeamUrl: URI!
+
+  """
+  The Node ID of the Team object
+  """
+  id: ID!
+
+  """
+  A list of pending invitations for users to this team
+  """
+  invitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationInvitationConnection
+
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+
+  """
+  A list of users who are members of this team.
+  """
+  members(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter by membership type
+    """
+    membership: TeamMembershipType = ALL
+
+    """
+    Order for the connection.
+    """
+    orderBy: TeamMemberOrder
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    Filter by team member role
+    """
+    role: TeamMemberRole
+  ): TeamMemberConnection!
+
+  """
+  The HTTP path for the team' members
+  """
+  membersResourcePath: URI!
+
+  """
+  The HTTP URL for the team' members
+  """
+  membersUrl: URI!
+
+  """
+  The name of the team.
+  """
+  name: String!
+
+  """
+  The HTTP path creating a new team
+  """
+  newTeamResourcePath: URI!
+
+  """
+  The HTTP URL creating a new team
+  """
+  newTeamUrl: URI!
+
+  """
+  The notification setting that the team has set.
+  """
+  notificationSetting: TeamNotificationSetting!
+
+  """
+  The organization that owns this team.
+  """
+  organization: Organization!
+
+  """
+  The parent team of the team.
+  """
+  parentTeam: Team
+
+  """
+  The level of privacy the team has.
+  """
+  privacy: TeamPrivacy!
+
+  """
+  Finds and returns the project according to the provided project number.
+  """
+  projectV2(
+    """
+    The Project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  List of projects this team has collaborator access to.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for projects returned from this connection
+    """
+    filterBy: ProjectV2Filters = {}
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    The query to search projects by.
+    """
+    query: String = ""
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories this team has access to.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for the connection.
+    """
+    orderBy: TeamRepositoryOrder
+
+    """
+    The search string to look for. Repositories will be returned where the name contains your search string.
+    """
+    query: String
+  ): TeamRepositoryConnection!
+
+  """
+  The HTTP path for this team's repositories
+  """
+  repositoriesResourcePath: URI!
+
+  """
+  The HTTP URL for this team's repositories
+  """
+  repositoriesUrl: URI!
+
+  """
+  The HTTP path for this team
+  """
+  resourcePath: URI!
+
+  """
+  What algorithm is used for review assignment for this team
+  """
+  reviewRequestDelegationAlgorithm: TeamReviewAssignmentAlgorithm @preview(toggledBy: "stone-crop-preview")
+
+  """
+  True if review assignment is enabled for this team
+  """
+  reviewRequestDelegationEnabled: Boolean! @preview(toggledBy: "stone-crop-preview")
+
+  """
+  How many team members are required for review assignment for this team
+  """
+  reviewRequestDelegationMemberCount: Int @preview(toggledBy: "stone-crop-preview")
+
+  """
+  When assigning team members via delegation, whether the entire team should be notified as well.
+  """
+  reviewRequestDelegationNotifyTeam: Boolean! @preview(toggledBy: "stone-crop-preview")
+
+  """
+  The slug corresponding to the team.
+  """
+  slug: String!
+
+  """
+  The HTTP path for this team's teams
+  """
+  teamsResourcePath: URI!
+
+  """
+  The HTTP URL for this team's teams
+  """
+  teamsUrl: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this team
+  """
+  url: URI!
+
+  """
+  Team is adminable by the viewer.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+Audit log entry for a team.add_member event.
+"""
+type TeamAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a team.add_repository event.
+"""
+type TeamAddRepositoryAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamAddRepositoryAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Metadata for an audit entry with action team.*
+"""
+interface TeamAuditEntryData {
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+}
+
+"""
+Audit log entry for a team.change_parent_team event.
+"""
+type TeamChangeParentTeamAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamChangeParentTeamAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new parent team.
+  """
+  parentTeam: Team
+
+  """
+  The name of the new parent team
+  """
+  parentTeamName: String
+
+  """
+  The name of the former parent team
+  """
+  parentTeamNameWas: String
+
+  """
+  The HTTP path for the parent team
+  """
+  parentTeamResourcePath: URI
+
+  """
+  The HTTP URL for the parent team
+  """
+  parentTeamUrl: URI
+
+  """
+  The former parent team.
+  """
+  parentTeamWas: Team
+
+  """
+  The HTTP path for the previous parent team
+  """
+  parentTeamWasResourcePath: URI
+
+  """
+  The HTTP URL for the previous parent team
+  """
+  parentTeamWasUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The connection type for Team.
+"""
+type TeamConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Team]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A team discussion.
+"""
+type TeamDiscussion implements Comment & Deletable & Node & Reactable & Subscribable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the discussion's team.
+  """
+  authorAssociation: CommentAuthorAssociation!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the discussion body hash.
+  """
+  bodyVersion: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of comments on this discussion.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    When provided, filters the connection such that results begin with the comment with this number.
+    """
+    fromComment: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamDiscussionCommentOrder
+  ): TeamDiscussionCommentConnection!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The HTTP path for discussion comments
+  """
+  commentsResourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The HTTP URL for discussion comments
+  """
+  commentsUrl: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the TeamDiscussion object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Whether or not the discussion is pinned.
+  """
+  isPinned: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Whether or not the discussion is only visible to team members and organization owners.
+  """
+  isPrivate: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies the discussion within its team.
+  """
+  number: Int!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The HTTP path for this discussion
+  """
+  resourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The team that defines the context of this discussion.
+  """
+  team: Team!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The title of the discussion
+  """
+  title: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this discussion
+  """
+  url: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Whether or not the current viewer can pin this discussion.
+  """
+  viewerCanPin: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+A comment on a team discussion.
+"""
+type TeamDiscussionComment implements Comment & Deletable & Node & Reactable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the comment's team.
+  """
+  authorAssociation: CommentAuthorAssociation!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The current version of the body content.
+  """
+  bodyVersion: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The discussion this comment is about.
+  """
+  discussion: TeamDiscussion!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the TeamDiscussionComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies the comment number.
+  """
+  number: Int!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The HTTP path for this comment
+  """
+  resourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this comment
+  """
+  url: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for TeamDiscussionComment.
+"""
+type TeamDiscussionCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamDiscussionCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [TeamDiscussionComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type TeamDiscussionCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: TeamDiscussionComment
+}
+
+"""
+Ways in which team discussion comment connections can be ordered.
+"""
+input TeamDiscussionCommentOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: TeamDiscussionCommentOrderField!
+}
+
+"""
+Properties by which team discussion comment connections can be ordered.
+"""
+enum TeamDiscussionCommentOrderField {
+  """
+  Allows sequential ordering of team discussion comments (which is equivalent to chronological ordering).
+  """
+  NUMBER
+}
+
+"""
+The connection type for TeamDiscussion.
+"""
+type TeamDiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamDiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [TeamDiscussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type TeamDiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: TeamDiscussion
+}
+
+"""
+Ways in which team discussion connections can be ordered.
+"""
+input TeamDiscussionOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: TeamDiscussionOrderField!
+}
+
+"""
+Properties by which team discussion connections can be ordered.
+"""
+enum TeamDiscussionOrderField {
+  """
+  Allows chronological ordering of team discussions.
+  """
+  CREATED_AT
+}
+
+"""
+An edge in a connection.
+"""
+type TeamEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Team
+}
+
+"""
+The connection type for User.
+"""
+type TeamMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user who is a member of a team.
+"""
+type TeamMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The HTTP path to the organization's member access page.
+  """
+  memberAccessResourcePath: URI!
+
+  """
+  The HTTP URL to the organization's member access page.
+  """
+  memberAccessUrl: URI!
+  node: User!
+
+  """
+  The role the member has on the team.
+  """
+  role: TeamMemberRole!
+}
+
+"""
+Ordering options for team member connections
+"""
+input TeamMemberOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order team members by.
+  """
+  field: TeamMemberOrderField!
+}
+
+"""
+Properties by which team member connections can be ordered.
+"""
+enum TeamMemberOrderField {
+  """
+  Order team members by creation time
+  """
+  CREATED_AT
+
+  """
+  Order team members by login
+  """
+  LOGIN
+}
+
+"""
+The possible team member roles; either 'maintainer' or 'member'.
+"""
+enum TeamMemberRole {
+  """
+  A team maintainer has permission to add and remove team members.
+  """
+  MAINTAINER
+
+  """
+  A team member has no administrative permissions on the team.
+  """
+  MEMBER
+}
+
+"""
+Defines which types of team members are included in the returned list. Can be one of IMMEDIATE, CHILD_TEAM or ALL.
+"""
+enum TeamMembershipType {
+  """
+  Includes immediate and child team members for the team.
+  """
+  ALL
+
+  """
+  Includes only child team members for the team.
+  """
+  CHILD_TEAM
+
+  """
+  Includes only immediate members of the team.
+  """
+  IMMEDIATE
+}
+
+"""
+The possible team notification values.
+"""
+enum TeamNotificationSetting {
+  """
+  No one will receive notifications.
+  """
+  NOTIFICATIONS_DISABLED
+
+  """
+  Everyone will receive notifications when the team is @mentioned.
+  """
+  NOTIFICATIONS_ENABLED
+}
+
+"""
+Ways in which team connections can be ordered.
+"""
+input TeamOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order nodes by.
+  """
+  field: TeamOrderField!
+}
+
+"""
+Properties by which team connections can be ordered.
+"""
+enum TeamOrderField {
+  """
+  Allows ordering a list of teams by name.
+  """
+  NAME
+}
+
+"""
+The possible team privacy values.
+"""
+enum TeamPrivacy {
+  """
+  A secret team can only be seen by its members.
+  """
+  SECRET
+
+  """
+  A visible team can be seen and @mentioned by every member of the organization.
+  """
+  VISIBLE
+}
+
+"""
+Audit log entry for a team.remove_member event.
+"""
+type TeamRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a team.remove_repository event.
+"""
+type TeamRemoveRepositoryAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamRemoveRepositoryAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The connection type for Repository.
+"""
+type TeamRepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamRepositoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a team repository.
+"""
+type TeamRepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: Repository!
+
+  """
+  The permission level the team has on the repository
+  """
+  permission: RepositoryPermission!
+}
+
+"""
+Ordering options for team repository connections
+"""
+input TeamRepositoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: TeamRepositoryOrderField!
+}
+
+"""
+Properties by which team repository connections can be ordered.
+"""
+enum TeamRepositoryOrderField {
+  """
+  Order repositories by creation time
+  """
+  CREATED_AT
+
+  """
+  Order repositories by name
+  """
+  NAME
+
+  """
+  Order repositories by permission
+  """
+  PERMISSION
+
+  """
+  Order repositories by push time
+  """
+  PUSHED_AT
+
+  """
+  Order repositories by number of stargazers
+  """
+  STARGAZERS
+
+  """
+  Order repositories by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The possible team review assignment algorithms
+"""
+enum TeamReviewAssignmentAlgorithm @preview(toggledBy: "stone-crop-preview") {
+  """
+  Balance review load across the entire team
+  """
+  LOAD_BALANCE
+
+  """
+  Alternate reviews between each team member
+  """
+  ROUND_ROBIN
+}
+
+"""
+The role of a user on a team.
+"""
+enum TeamRole {
+  """
+  User has admin rights on the team.
+  """
+  ADMIN
+
+  """
+  User is a member of the team.
+  """
+  MEMBER
+}
+
+"""
+A text match within a search result.
+"""
+type TextMatch {
+  """
+  The specific text fragment within the property matched on.
+  """
+  fragment: String!
+
+  """
+  Highlights within the matched fragment.
+  """
+  highlights: [TextMatchHighlight!]!
+
+  """
+  The property matched on.
+  """
+  property: String!
+}
+
+"""
+Represents a single highlight in a search result match.
+"""
+type TextMatchHighlight {
+  """
+  The indice in the fragment where the matched text begins.
+  """
+  beginIndice: Int!
+
+  """
+  The indice in the fragment where the matched text ends.
+  """
+  endIndice: Int!
+
+  """
+  The text matched.
+  """
+  text: String!
+}
+
+"""
+The possible states of a thread subscription form action
+"""
+enum ThreadSubscriptionFormAction {
+  """
+  The User cannot subscribe or unsubscribe to the thread
+  """
+  NONE
+
+  """
+  The User can subscribe to the thread
+  """
+  SUBSCRIBE
+
+  """
+  The User can unsubscribe to the thread
+  """
+  UNSUBSCRIBE
+}
+
+"""
+The possible states of a subscription.
+"""
+enum ThreadSubscriptionState {
+  """
+  The subscription status is currently disabled.
+  """
+  DISABLED
+
+  """
+  The User is never notified because they are ignoring the list
+  """
+  IGNORING_LIST
+
+  """
+  The User is never notified because they are ignoring the thread
+  """
+  IGNORING_THREAD
+
+  """
+  The User is not receiving notifications from this thread
+  """
+  NONE
+
+  """
+  The User is notified because they are watching the list
+  """
+  SUBSCRIBED_TO_LIST
+
+  """
+  The User is notified because they are subscribed to the thread
+  """
+  SUBSCRIBED_TO_THREAD
+
+  """
+  The User is notified because they chose custom settings for this thread.
+  """
+  SUBSCRIBED_TO_THREAD_EVENTS
+
+  """
+  The User is notified because they chose custom settings for this thread.
+  """
+  SUBSCRIBED_TO_THREAD_TYPE
+
+  """
+  The subscription status is currently unavailable.
+  """
+  UNAVAILABLE
+}
+
+"""
+A topic aggregates entities that are related to a subject.
+"""
+type Topic implements Node & Starrable {
+  """
+  The Node ID of the Topic object
+  """
+  id: ID!
+
+  """
+  The topic's name.
+  """
+  name: String!
+
+  """
+  A list of related topics, including aliases of this topic, sorted with the most relevant
+  first. Returns up to 10 Topics.
+  """
+  relatedTopics(
+    """
+    How many topics to return.
+    """
+    first: Int = 3
+  ): [Topic!]!
+
+  """
+  A list of repositories.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+
+    """
+    If true, only repositories whose owner can be sponsored via GitHub Sponsors will be returned.
+    """
+    sponsorableOnly: Boolean = false
+  ): RepositoryConnection!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
+
+"""
+Metadata for an audit entry with a topic.
+"""
+interface TopicAuditEntryData {
+  """
+  The name of the topic added to the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic added to the repository
+  """
+  topicName: String
+}
+
+"""
+Reason that the suggested topic is declined.
+"""
+enum TopicSuggestionDeclineReason {
+  """
+  The suggested topic is not relevant to the repository.
+  """
+  NOT_RELEVANT
+
+  """
+  The viewer does not like the suggested topic.
+  """
+  PERSONAL_PREFERENCE
+
+  """
+  The suggested topic is too general for the repository.
+  """
+  TOO_GENERAL
+
+  """
+  The suggested topic is too specific for the repository (e.g. #ruby-on-rails-version-4-2-1).
+  """
+  TOO_SPECIFIC
+}
+
+"""
+The possible states of a tracked issue.
+"""
+enum TrackedIssueStates {
+  """
+  The tracked issue is closed
+  """
+  CLOSED
+
+  """
+  The tracked issue is open
+  """
+  OPEN
+}
+
+"""
+Autogenerated input type of TransferEnterpriseOrganization
+"""
+input TransferEnterpriseOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise where the organization should be transferred.
+  """
+  destinationEnterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization to transfer.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of TransferEnterpriseOrganization
+"""
+type TransferEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization for which a transfer was initiated.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of TransferIssue
+"""
+input TransferIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether to create labels if they don't exist in the target repository (matched by name)
+  """
+  createLabelsIfMissing: Boolean = false
+
+  """
+  The Node ID of the issue to be transferred
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The Node ID of the repository the issue should be transferred to
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of TransferIssue
+"""
+type TransferIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was transferred
+  """
+  issue: Issue
+}
+
+"""
+Represents a 'transferred' event on a given issue or pull request.
+"""
+type TransferredEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The repository this came from
+  """
+  fromRepository: Repository
+
+  """
+  The Node ID of the TransferredEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+Represents a Git tree.
+"""
+type Tree implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  A list of tree entries.
+  """
+  entries: [TreeEntry!]
+
+  """
+  The Node ID of the Tree object
+  """
+  id: ID!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+}
+
+"""
+Represents a Git tree entry.
+"""
+type TreeEntry {
+  """
+  The extension of the file
+  """
+  extension: String
+
+  """
+  Whether or not this tree entry is generated
+  """
+  isGenerated: Boolean!
+
+  """
+  The programming language this file is written in.
+  """
+  language: Language
+
+  """
+  Number of lines in the file.
+  """
+  lineCount: Int
+
+  """
+  Entry file mode.
+  """
+  mode: Int!
+
+  """
+  Entry file name.
+  """
+  name: String!
+
+  """
+  Entry file name. (Base64-encoded)
+  """
+  nameRaw: Base64String!
+
+  """
+  Entry file object.
+  """
+  object: GitObject
+
+  """
+  Entry file Git object ID.
+  """
+  oid: GitObjectID!
+
+  """
+  The full path of the file.
+  """
+  path: String
+
+  """
+  The full path of the file. (Base64-encoded)
+  """
+  pathRaw: Base64String
+
+  """
+  The Repository the tree entry belongs to
+  """
+  repository: Repository!
+
+  """
+  Entry byte size
+  """
+  size: Int!
+
+  """
+  If the TreeEntry is for a directory occupied by a submodule project, this returns the corresponding submodule
+  """
+  submodule: Submodule
+
+  """
+  Entry file type.
+  """
+  type: String!
+}
+
+"""
+An RFC 3986, RFC 3987, and RFC 6570 (level 4) compliant URI string.
+"""
+scalar URI
+
+"""
+Autogenerated input type of UnarchiveProjectV2Item
+"""
+input UnarchiveProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the ProjectV2Item to unarchive.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project to archive the item from.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UnarchiveProjectV2Item
+"""
+type UnarchiveProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item unarchived from the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of UnarchiveRepository
+"""
+input UnarchiveRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to unarchive.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnarchiveRepository
+"""
+type UnarchiveRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was unarchived.
+  """
+  repository: Repository
+}
+
+"""
+Represents an 'unassigned' event on any assignable object.
+"""
+type UnassignedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the assignable associated with the event.
+  """
+  assignable: Assignable!
+
+  """
+  Identifies the user or mannequin that was unassigned.
+  """
+  assignee: Assignee
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnassignedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the subject (user) who was unassigned.
+  """
+  user: User
+    @deprecated(reason: "Assignees can now be mannequins. Use the `assignee` field instead. Removal on 2020-01-01 UTC.")
+}
+
+"""
+Autogenerated input type of UnfollowOrganization
+"""
+input UnfollowOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the organization to unfollow.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of UnfollowOrganization
+"""
+type UnfollowOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that was unfollowed.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of UnfollowUser
+"""
+input UnfollowUserInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the user to unfollow.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of UnfollowUser
+"""
+type UnfollowUserPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was unfollowed.
+  """
+  user: User
+}
+
+"""
+Represents a type that can be retrieved by a URL.
+"""
+interface UniformResourceLocatable {
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  The URL to this resource.
+  """
+  url: URI!
+}
+
+"""
+Represents an unknown signature on a Commit or Tag.
+"""
+type UnknownSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if signature is valid and verified by
+  GitHub, otherwise represents reason why signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Represents an 'unlabeled' event on a given issue or pull request.
+"""
+type UnlabeledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnlabeledEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the label associated with the 'unlabeled' event.
+  """
+  label: Label!
+
+  """
+  Identifies the `Labelable` associated with the event.
+  """
+  labelable: Labelable!
+}
+
+"""
+Autogenerated input type of UnlinkProjectV2FromRepository
+"""
+input UnlinkProjectV2FromRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to unlink from the repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the repository to unlink from the project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnlinkProjectV2FromRepository
+"""
+type UnlinkProjectV2FromRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the project is no longer linked to.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UnlinkProjectV2FromTeam
+"""
+input UnlinkProjectV2FromTeamInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to unlink from the team.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the team to unlink from the project.
+  """
+  teamId: ID! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of UnlinkProjectV2FromTeam
+"""
+type UnlinkProjectV2FromTeamPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team the project is unlinked from
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of UnlinkRepositoryFromProject
+"""
+input UnlinkRepositoryFromProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project linked to the Repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The ID of the Repository linked to the Project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnlinkRepositoryFromProject
+"""
+type UnlinkRepositoryFromProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The linked Project.
+  """
+  project: Project
+
+  """
+  The linked Repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UnlockLockable
+"""
+input UnlockLockableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the item to be unlocked.
+  """
+  lockableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Lockable")
+}
+
+"""
+Autogenerated return type of UnlockLockable
+"""
+type UnlockLockablePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was unlocked.
+  """
+  unlockedRecord: Lockable
+}
+
+"""
+Represents an 'unlocked' event on a given issue or pull request.
+"""
+type UnlockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnlockedEvent object
+  """
+  id: ID!
+
+  """
+  Object that was unlocked.
+  """
+  lockable: Lockable!
+}
+
+"""
+Autogenerated input type of UnmarkDiscussionCommentAsAnswer
+"""
+input UnmarkDiscussionCommentAsAnswerInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to unmark as an answer.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of UnmarkDiscussionCommentAsAnswer
+"""
+type UnmarkDiscussionCommentAsAnswerPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that includes the comment.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of UnmarkFileAsViewed
+"""
+input UnmarkFileAsViewedInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The path of the file to mark as unviewed
+  """
+  path: String!
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of UnmarkFileAsViewed
+"""
+type UnmarkFileAsViewedPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of UnmarkIssueAsDuplicate
+"""
+input UnmarkIssueAsDuplicateInput {
+  """
+  ID of the issue or pull request currently considered canonical/authoritative/original.
+  """
+  canonicalId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue or pull request currently marked as a duplicate.
+  """
+  duplicateId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+}
+
+"""
+Autogenerated return type of UnmarkIssueAsDuplicate
+"""
+type UnmarkIssueAsDuplicatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue or pull request that was marked as a duplicate.
+  """
+  duplicate: IssueOrPullRequest
+}
+
+"""
+Autogenerated input type of UnmarkProjectV2AsTemplate
+"""
+input UnmarkProjectV2AsTemplateInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to unmark as a template.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UnmarkProjectV2AsTemplate
+"""
+type UnmarkProjectV2AsTemplatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+"""
+type UnmarkedAsDuplicateEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  The authoritative issue or pull request which has been duplicated by another.
+  """
+  canonical: IssueOrPullRequest
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The issue or pull request which has been marked as a duplicate of another.
+  """
+  duplicate: IssueOrPullRequest
+
+  """
+  The Node ID of the UnmarkedAsDuplicateEvent object
+  """
+  id: ID!
+
+  """
+  Canonical and duplicate belong to different repositories.
+  """
+  isCrossRepository: Boolean!
+}
+
+"""
+Autogenerated input type of UnminimizeComment
+"""
+input UnminimizeCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "DiscussionComment"
+        "GistComment"
+        "IssueComment"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+      ]
+      abstractType: "Minimizable"
+    )
+}
+
+"""
+Autogenerated return type of UnminimizeComment
+"""
+type UnminimizeCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The comment that was unminimized.
+  """
+  unminimizedComment: Minimizable
+}
+
+"""
+Autogenerated input type of UnpinIssue
+"""
+input UnpinIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to be unpinned
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of UnpinIssue
+"""
+type UnpinIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was unpinned
+  """
+  issue: Issue
+}
+
+"""
+Represents an 'unpinned' event on a given issue or pull request.
+"""
+type UnpinnedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnpinnedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+Autogenerated input type of UnresolveReviewThread
+"""
+input UnresolveReviewThreadInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the thread to unresolve
+  """
+  threadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of UnresolveReviewThread
+"""
+type UnresolveReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The thread to resolve.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Represents an 'unsubscribed' event on a given `Subscribable`.
+"""
+type UnsubscribedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnsubscribedEvent object
+  """
+  id: ID!
+
+  """
+  Object referenced by event.
+  """
+  subscribable: Subscribable!
+}
+
+"""
+Entities that can be updated.
+"""
+interface Updatable {
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+}
+
+"""
+Comments that can be updated.
+"""
+interface UpdatableComment {
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+}
+
+"""
+Autogenerated input type of UpdateBranchProtectionRule
+"""
+input UpdateBranchProtectionRuleInput {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean
+
+  """
+  The global relay id of the branch protection rule to be updated.
+  """
+  branchProtectionRuleId: ID! @possibleTypes(concreteTypes: ["BranchProtectionRule"])
+
+  """
+  A list of User, Team, or App IDs allowed to bypass force push targeting matching branches.
+  """
+  bypassForcePushActorIds: [ID!]
+
+  """
+  A list of User, Team, or App IDs allowed to bypass pull requests targeting matching branches.
+  """
+  bypassPullRequestActorIds: [ID!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean
+
+  """
+  The glob-like pattern used to determine matching branches.
+  """
+  pattern: String
+
+  """
+  A list of User, Team, or App IDs allowed to push to matching branches.
+  """
+  pushActorIds: [ID!]
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  The list of required deployment environments
+  """
+  requiredDeploymentEnvironments: [String!]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String!]
+
+  """
+  The list of required status checks
+  """
+  requiredStatusChecks: [RequiredStatusCheckInput!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean
+
+  """
+  Are successful deployments required before merging.
+  """
+  requiresDeployments: Boolean
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to dismiss reviews on pull requests targeting matching branches.
+  """
+  reviewDismissalActorIds: [ID!]
+}
+
+"""
+Autogenerated return type of UpdateBranchProtectionRule
+"""
+type UpdateBranchProtectionRulePayload {
+  """
+  The newly created BranchProtectionRule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of UpdateCheckRun
+"""
+input UpdateCheckRunInput {
+  """
+  Possible further actions the integrator can perform, which a user may trigger.
+  """
+  actions: [CheckRunAction!]
+
+  """
+  The node of the check.
+  """
+  checkRunId: ID! @possibleTypes(concreteTypes: ["CheckRun"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The time that the check run finished.
+  """
+  completedAt: DateTime
+
+  """
+  The final conclusion of the check.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  The URL of the integrator's site that has the full details of the check.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The name of the check.
+  """
+  name: String
+
+  """
+  Descriptive details about the run.
+  """
+  output: CheckRunOutput
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The time that the check run began.
+  """
+  startedAt: DateTime
+
+  """
+  The current status.
+  """
+  status: RequestableCheckStatusState
+}
+
+"""
+Autogenerated return type of UpdateCheckRun
+"""
+type UpdateCheckRunPayload {
+  """
+  The updated check run.
+  """
+  checkRun: CheckRun
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of UpdateCheckSuitePreferences
+"""
+input UpdateCheckSuitePreferencesInput {
+  """
+  The check suite preferences to modify.
+  """
+  autoTriggerPreferences: [CheckSuiteAutoTriggerPreference!]!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UpdateCheckSuitePreferences
+"""
+type UpdateCheckSuitePreferencesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UpdateDiscussionComment
+"""
+input UpdateDiscussionCommentInput {
+  """
+  The new contents of the comment body.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to update.
+  """
+  commentId: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of UpdateDiscussionComment
+"""
+type UpdateDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The modified discussion comment.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of UpdateDiscussion
+"""
+input UpdateDiscussionInput {
+  """
+  The new contents of the discussion body.
+  """
+  body: String
+
+  """
+  The Node ID of a discussion category within the same repository to change this discussion to.
+  """
+  categoryId: ID @possibleTypes(concreteTypes: ["DiscussionCategory"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to update.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The new discussion title.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateDiscussion
+"""
+type UpdateDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The modified discussion.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseAdministratorRole
+"""
+input UpdateEnterpriseAdministratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a administrator whose role is being changed.
+  """
+  login: String!
+
+  """
+  The new role for the Enterprise administrator.
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseAdministratorRole
+"""
+type UpdateEnterpriseAdministratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of changing the administrator's role.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+"""
+input UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the allow private repository forking setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the allow private repository forking policy on the enterprise.
+  """
+  policyValue: EnterpriseAllowPrivateRepositoryForkingPolicyValue
+
+  """
+  The value for the allow private repository forking setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+"""
+type UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated allow private repository forking setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the allow private repository forking setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+"""
+input UpdateEnterpriseDefaultRepositoryPermissionSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the base repository permission setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the base repository permission setting on the enterprise.
+  """
+  settingValue: EnterpriseDefaultRepositoryPermissionSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+"""
+type UpdateEnterpriseDefaultRepositoryPermissionSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated base repository permission setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the base repository permission setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+"""
+input UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can change repository visibility setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can change repository visibility setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+"""
+type UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can change repository visibility setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can change repository visibility setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+"""
+input UpdateEnterpriseMembersCanCreateRepositoriesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can create repositories setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  Allow members to create internal repositories. Defaults to current value.
+  """
+  membersCanCreateInternalRepositories: Boolean
+
+  """
+  Allow members to create private repositories. Defaults to current value.
+  """
+  membersCanCreatePrivateRepositories: Boolean
+
+  """
+  Allow members to create public repositories. Defaults to current value.
+  """
+  membersCanCreatePublicRepositories: Boolean
+
+  """
+  When false, allow member organizations to set their own repository creation member privileges.
+  """
+  membersCanCreateRepositoriesPolicyEnabled: Boolean
+
+  """
+  Value for the members can create repositories setting on the enterprise. This
+  or the granular public/private/internal allowed fields (but not both) must be provided.
+  """
+  settingValue: EnterpriseMembersCanCreateRepositoriesSettingValue
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+"""
+type UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can create repositories setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can create repositories setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+"""
+input UpdateEnterpriseMembersCanDeleteIssuesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can delete issues setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can delete issues setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+"""
+type UpdateEnterpriseMembersCanDeleteIssuesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can delete issues setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can delete issues setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+"""
+input UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can delete repositories setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can delete repositories setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+"""
+type UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can delete repositories setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can delete repositories setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+"""
+input UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can invite collaborators setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can invite collaborators setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+"""
+type UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can invite collaborators setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can invite collaborators setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanMakePurchasesSetting
+"""
+input UpdateEnterpriseMembersCanMakePurchasesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can make purchases setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can make purchases setting on the enterprise.
+  """
+  settingValue: EnterpriseMembersCanMakePurchasesSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanMakePurchasesSetting
+"""
+type UpdateEnterpriseMembersCanMakePurchasesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can make purchases setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can make purchases setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+"""
+input UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can update protected branches setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can update protected branches setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+"""
+type UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can update protected branches setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can update protected branches setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+"""
+input UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can view dependency insights setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can view dependency insights setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+"""
+type UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can view dependency insights setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can view dependency insights setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseOrganizationProjectsSetting
+"""
+input UpdateEnterpriseOrganizationProjectsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the organization projects setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the organization projects setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseOrganizationProjectsSetting
+"""
+type UpdateEnterpriseOrganizationProjectsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated organization projects setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the organization projects setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseOwnerOrganizationRole
+"""
+input UpdateEnterpriseOwnerOrganizationRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the owner belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization for membership change.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The role to assume in the organization.
+  """
+  organizationRole: RoleInOrganization!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseOwnerOrganizationRole
+"""
+type UpdateEnterpriseOwnerOrganizationRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of changing the owner's organization role.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseProfile
+"""
+input UpdateEnterpriseProfileInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The description of the enterprise.
+  """
+  description: String
+
+  """
+  The Enterprise ID to update.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The location of the enterprise.
+  """
+  location: String
+
+  """
+  The name of the enterprise.
+  """
+  name: String
+
+  """
+  The URL of the enterprise's website.
+  """
+  websiteUrl: String
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseProfile
+"""
+type UpdateEnterpriseProfilePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseRepositoryProjectsSetting
+"""
+input UpdateEnterpriseRepositoryProjectsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the repository projects setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the repository projects setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseRepositoryProjectsSetting
+"""
+type UpdateEnterpriseRepositoryProjectsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated repository projects setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the repository projects setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseTeamDiscussionsSetting
+"""
+input UpdateEnterpriseTeamDiscussionsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the team discussions setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the team discussions setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseTeamDiscussionsSetting
+"""
+type UpdateEnterpriseTeamDiscussionsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated team discussions setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the team discussions setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+"""
+input UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the two factor authentication required setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the two factor authentication required setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+"""
+type UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated two factor authentication required setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the two factor authentication required setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnvironment
+"""
+input UpdateEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The node ID of the environment.
+  """
+  environmentId: ID! @possibleTypes(concreteTypes: ["Environment"])
+
+  """
+  Whether deployments to this environment can be approved by the user who created the deployment.
+  """
+  preventSelfReview: Boolean
+
+  """
+  The ids of users or teams that can approve deployments to this environment
+  """
+  reviewers: [ID!]
+
+  """
+  The wait timer in minutes.
+  """
+  waitTimer: Int
+}
+
+"""
+Autogenerated return type of UpdateEnvironment
+"""
+type UpdateEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated environment.
+  """
+  environment: Environment
+}
+
+"""
+Autogenerated input type of UpdateIpAllowListEnabledSetting
+"""
+input UpdateIpAllowListEnabledSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner on which to set the IP allow list enabled setting.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+
+  """
+  The value for the IP allow list enabled setting.
+  """
+  settingValue: IpAllowListEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListEnabledSetting
+"""
+type UpdateIpAllowListEnabledSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list owner on which the setting was updated.
+  """
+  owner: IpAllowListOwner
+}
+
+"""
+Autogenerated input type of UpdateIpAllowListEntry
+"""
+input UpdateIpAllowListEntryInput {
+  """
+  An IP address or range of addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IP allow list entry to update.
+  """
+  ipAllowListEntryId: ID! @possibleTypes(concreteTypes: ["IpAllowListEntry"])
+
+  """
+  Whether the IP allow list entry is active when an IP allow list is enabled.
+  """
+  isActive: Boolean!
+
+  """
+  An optional name for the IP allow list entry.
+  """
+  name: String
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListEntry
+"""
+type UpdateIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was updated.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
+
+"""
+Autogenerated input type of UpdateIpAllowListForInstalledAppsEnabledSetting
+"""
+input UpdateIpAllowListForInstalledAppsEnabledSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+
+  """
+  The value for the IP allow list configuration for installed GitHub Apps setting.
+  """
+  settingValue: IpAllowListForInstalledAppsEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListForInstalledAppsEnabledSetting
+"""
+type UpdateIpAllowListForInstalledAppsEnabledSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list owner on which the setting was updated.
+  """
+  owner: IpAllowListOwner
+}
+
+"""
+Autogenerated input type of UpdateIssueComment
+"""
+input UpdateIssueCommentInput {
+  """
+  The updated text of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IssueComment to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["IssueComment"])
+}
+
+"""
+Autogenerated return type of UpdateIssueComment
+"""
+type UpdateIssueCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  issueComment: IssueComment
+}
+
+"""
+Autogenerated input type of UpdateIssue
+"""
+input UpdateIssueInput {
+  """
+  An array of Node IDs of users for this issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body for the issue description.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Issue to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  An array of Node IDs of labels for this issue.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The Node ID of the milestone for this issue.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this issue.
+  """
+  projectIds: [ID!]
+
+  """
+  The desired issue state.
+  """
+  state: IssueState
+
+  """
+  The title for the issue.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateIssue
+"""
+type UpdateIssuePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of UpdateLabel
+"""
+input UpdateLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A 6 character hex code, without the leading #, identifying the updated color of the label.
+  """
+  color: String
+
+  """
+  A brief description of the label, such as its purpose.
+  """
+  description: String
+
+  """
+  The Node ID of the label to be updated.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The updated name of the label.
+  """
+  name: String
+}
+
+"""
+Autogenerated return type of UpdateLabel
+"""
+type UpdateLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated label.
+  """
+  label: Label
+}
+
+"""
+Autogenerated input type of UpdateNotificationRestrictionSetting
+"""
+input UpdateNotificationRestrictionSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner on which to set the restrict notifications setting.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Enterprise", "Organization"], abstractType: "VerifiableDomainOwner")
+
+  """
+  The value for the restrict notifications setting.
+  """
+  settingValue: NotificationRestrictionSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateNotificationRestrictionSetting
+"""
+type UpdateNotificationRestrictionSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner on which the setting was updated.
+  """
+  owner: VerifiableDomainOwner
+}
+
+"""
+Autogenerated input type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+"""
+input UpdateOrganizationAllowPrivateRepositoryForkingSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Enable forking of private repositories in the organization?
+  """
+  forkingEnabled: Boolean!
+
+  """
+  The ID of the organization on which to set the allow private repository forking setting.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+"""
+type UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the allow private repository forking setting.
+  """
+  message: String
+
+  """
+  The organization with the updated allow private repository forking setting.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of UpdateOrganizationWebCommitSignoffSetting
+"""
+input UpdateOrganizationWebCommitSignoffSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization on which to set the web commit signoff setting.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  Enable signoff on web-based commits for repositories in the organization?
+  """
+  webCommitSignoffRequired: Boolean!
+}
+
+"""
+Autogenerated return type of UpdateOrganizationWebCommitSignoffSetting
+"""
+type UpdateOrganizationWebCommitSignoffSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the web commit signoff setting.
+  """
+  message: String
+
+  """
+  The organization with the updated web commit signoff setting.
+  """
+  organization: Organization
+}
+
+"""
+Only allow users with bypass permission to update matching refs.
+"""
+type UpdateParameters {
+  """
+  Branch can pull changes from its upstream repository
+  """
+  updateAllowsFetchAndMerge: Boolean!
+}
+
+"""
+Only allow users with bypass permission to update matching refs.
+"""
+input UpdateParametersInput {
+  """
+  Branch can pull changes from its upstream repository
+  """
+  updateAllowsFetchAndMerge: Boolean!
+}
+
+"""
+Autogenerated input type of UpdatePatreonSponsorability
+"""
+input UpdatePatreonSponsorabilityInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether Patreon tiers should be shown on the GitHub Sponsors profile page,
+  allowing potential sponsors to make their payment through Patreon instead of GitHub.
+  """
+  enablePatreonSponsorships: Boolean!
+
+  """
+  The username of the organization with the GitHub Sponsors profile, if any.
+  Defaults to the GitHub Sponsors profile for the authenticated user if omitted.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of UpdatePatreonSponsorability
+"""
+type UpdatePatreonSponsorabilityPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The GitHub Sponsors profile.
+  """
+  sponsorsListing: SponsorsListing
+}
+
+"""
+Autogenerated input type of UpdateProjectCard
+"""
+input UpdateProjectCardInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not the ProjectCard should be archived
+  """
+  isArchived: Boolean
+
+  """
+  The note of ProjectCard.
+  """
+  note: String
+
+  """
+  The ProjectCard ID to update.
+  """
+  projectCardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+}
+
+"""
+Autogenerated return type of UpdateProjectCard
+"""
+type UpdateProjectCardPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated ProjectCard.
+  """
+  projectCard: ProjectCard
+}
+
+"""
+Autogenerated input type of UpdateProjectColumn
+"""
+input UpdateProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of project column.
+  """
+  name: String!
+
+  """
+  The ProjectColumn ID to update.
+  """
+  projectColumnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of UpdateProjectColumn
+"""
+type UpdateProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated project column.
+  """
+  projectColumn: ProjectColumn
+}
+
+"""
+Autogenerated input type of UpdateProject
+"""
+input UpdateProjectInput {
+  """
+  The description of project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of project.
+  """
+  name: String
+
+  """
+  The Project ID to update.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  Whether the project is public or not.
+  """
+  public: Boolean
+
+  """
+  Whether the project is open or closed.
+  """
+  state: ProjectState
+}
+
+"""
+Autogenerated return type of UpdateProject
+"""
+type UpdateProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of UpdateProjectV2Collaborators
+"""
+input UpdateProjectV2CollaboratorsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The collaborators to update.
+  """
+  collaborators: [ProjectV2Collaborator!]!
+
+  """
+  The ID of the project to update the collaborators for.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UpdateProjectV2Collaborators
+"""
+type UpdateProjectV2CollaboratorsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The collaborators granted a role
+  """
+  collaborators(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ActorConnection
+}
+
+"""
+Autogenerated input type of UpdateProjectV2DraftIssue
+"""
+input UpdateProjectV2DraftIssueInput {
+  """
+  The IDs of the assignees of the draft issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body of the draft issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the draft issue to update.
+  """
+  draftIssueId: ID! @possibleTypes(concreteTypes: ["DraftIssue"])
+
+  """
+  The title of the draft issue.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateProjectV2DraftIssue
+"""
+type UpdateProjectV2DraftIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The draft issue updated in the project.
+  """
+  draftIssue: DraftIssue
+}
+
+"""
+Autogenerated input type of UpdateProjectV2
+"""
+input UpdateProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Set the project to closed or open.
+  """
+  closed: Boolean
+
+  """
+  The ID of the Project to update.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  Set the project to public or private.
+  """
+  public: Boolean
+
+  """
+  Set the readme description of the project.
+  """
+  readme: String
+
+  """
+  Set the short description of the project.
+  """
+  shortDescription: String
+
+  """
+  Set the title of the project.
+  """
+  title: String
+}
+
+"""
+Autogenerated input type of UpdateProjectV2ItemFieldValue
+"""
+input UpdateProjectV2ItemFieldValueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to be updated.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+
+  """
+  The ID of the item to be updated.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The value which will be set on the field.
+  """
+  value: ProjectV2FieldValue!
+}
+
+"""
+Autogenerated return type of UpdateProjectV2ItemFieldValue
+"""
+type UpdateProjectV2ItemFieldValuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated item.
+  """
+  projectV2Item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of UpdateProjectV2ItemPosition
+"""
+input UpdateProjectV2ItemPositionInput {
+  """
+  The ID of the item to position this item after. If omitted or set to null the item will be moved to top.
+  """
+  afterId: ID @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the item to be moved.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UpdateProjectV2ItemPosition
+"""
+type UpdateProjectV2ItemPositionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The items in the new order
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection
+}
+
+"""
+Autogenerated return type of UpdateProjectV2
+"""
+type UpdateProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated Project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of UpdatePullRequestBranch
+"""
+input UpdatePullRequestBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The head ref oid for the upstream branch.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The update branch method to use. If omitted, defaults to 'MERGE'
+  """
+  updateMethod: PullRequestBranchUpdateMethod
+}
+
+"""
+Autogenerated return type of UpdatePullRequestBranch
+"""
+type UpdatePullRequestBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of UpdatePullRequest
+"""
+input UpdatePullRequestInput {
+  """
+  An array of Node IDs of users for this pull request.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The name of the branch you want your changes pulled into. This should be an existing branch
+  on the current repository.
+  """
+  baseRefName: String
+
+  """
+  The contents of the pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  An array of Node IDs of labels for this pull request.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean
+
+  """
+  The Node ID of the milestone for this pull request.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this pull request.
+  """
+  projectIds: [ID!]
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The target state of the pull request.
+  """
+  state: PullRequestUpdateState
+
+  """
+  The title of the pull request.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdatePullRequest
+"""
+type UpdatePullRequestPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of UpdatePullRequestReviewComment
+"""
+input UpdatePullRequestReviewCommentInput {
+  """
+  The text of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the comment to modify.
+  """
+  pullRequestReviewCommentId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+}
+
+"""
+Autogenerated return type of UpdatePullRequestReviewComment
+"""
+type UpdatePullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  pullRequestReviewComment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of UpdatePullRequestReview
+"""
+input UpdatePullRequestReviewInput {
+  """
+  The contents of the pull request review body.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request review to modify.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of UpdatePullRequestReview
+"""
+type UpdatePullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+Autogenerated input type of UpdateRef
+"""
+input UpdateRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Permit updates of branch Refs that are not fast-forwards?
+  """
+  force: Boolean = false
+
+  """
+  The GitObjectID that the Ref shall be updated to target.
+  """
+  oid: GitObjectID!
+
+  """
+  The Node ID of the Ref to be updated.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+}
+
+"""
+Autogenerated return type of UpdateRef
+"""
+type UpdateRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated Ref.
+  """
+  ref: Ref
+}
+
+"""
+Autogenerated input type of UpdateRefs
+"""
+input UpdateRefsInput @preview(toggledBy: "update-refs-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A list of ref updates.
+  """
+  refUpdates: [RefUpdate!]!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UpdateRefs
+"""
+type UpdateRefsPayload @preview(toggledBy: "update-refs-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of UpdateRepository
+"""
+input UpdateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A new description for the repository. Pass an empty string to erase the existing description.
+  """
+  description: String
+
+  """
+  Indicates if the repository should have the discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean
+
+  """
+  Indicates if the repository should have the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean
+
+  """
+  Indicates if the repository should have the project boards feature enabled.
+  """
+  hasProjectsEnabled: Boolean
+
+  """
+  Indicates if the repository should have the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean
+
+  """
+  The URL for a web page about this repository. Pass an empty string to erase the existing URL.
+  """
+  homepageUrl: URI
+
+  """
+  The new name of the repository.
+  """
+  name: String
+
+  """
+  The ID of the repository to update.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Whether this repository should be marked as a template such that anyone who
+  can access it can create new repositories with the same files and directory structure.
+  """
+  template: Boolean
+}
+
+"""
+Autogenerated return type of UpdateRepository
+"""
+type UpdateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UpdateRepositoryRuleset
+"""
+input UpdateRepositoryRulesetInput {
+  """
+  A list of actors that are allowed to bypass rules in this ruleset.
+  """
+  bypassActors: [RepositoryRulesetBypassActorInput!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The list of conditions for this ruleset
+  """
+  conditions: RepositoryRuleConditionsInput
+
+  """
+  The enforcement level for this ruleset
+  """
+  enforcement: RuleEnforcement
+
+  """
+  The name of the ruleset.
+  """
+  name: String
+
+  """
+  The global relay id of the repository ruleset to be updated.
+  """
+  repositoryRulesetId: ID! @possibleTypes(concreteTypes: ["RepositoryRuleset"])
+
+  """
+  The list of rules for this ruleset
+  """
+  rules: [RepositoryRuleInput!]
+
+  """
+  The target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+}
+
+"""
+Autogenerated return type of UpdateRepositoryRuleset
+"""
+type UpdateRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created Ruleset.
+  """
+  ruleset: RepositoryRuleset
+}
+
+"""
+Autogenerated input type of UpdateRepositoryWebCommitSignoffSetting
+"""
+input UpdateRepositoryWebCommitSignoffSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to update.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates if the repository should require signoff on web-based commits.
+  """
+  webCommitSignoffRequired: Boolean!
+}
+
+"""
+Autogenerated return type of UpdateRepositoryWebCommitSignoffSetting
+"""
+type UpdateRepositoryWebCommitSignoffSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the web commit signoff setting.
+  """
+  message: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UpdateSponsorshipPreferences
+"""
+input UpdateSponsorshipPreferencesInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorable. Public visibility still does not reveal which tier is used.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorable.
+  """
+  receiveEmails: Boolean = true
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of UpdateSponsorshipPreferences
+"""
+type UpdateSponsorshipPreferencesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The sponsorship that was updated.
+  """
+  sponsorship: Sponsorship
+}
+
+"""
+Autogenerated input type of UpdateSubscription
+"""
+input UpdateSubscriptionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new state of the subscription.
+  """
+  state: SubscriptionState!
+
+  """
+  The Node ID of the subscribable object to modify.
+  """
+  subscribableId: ID!
+    @possibleTypes(
+      concreteTypes: ["Commit", "Discussion", "Issue", "PullRequest", "Repository", "Team", "TeamDiscussion"]
+      abstractType: "Subscribable"
+    )
+}
+
+"""
+Autogenerated return type of UpdateSubscription
+"""
+type UpdateSubscriptionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The input subscribable entity.
+  """
+  subscribable: Subscribable
+}
+
+"""
+Autogenerated input type of UpdateTeamDiscussionComment
+"""
+input UpdateTeamDiscussionCommentInput {
+  """
+  The updated text of the comment.
+  """
+  body: String!
+
+  """
+  The current version of the body content.
+  """
+  bodyVersion: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussionComment"])
+}
+
+"""
+Autogenerated return type of UpdateTeamDiscussionComment
+"""
+type UpdateTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  teamDiscussionComment: TeamDiscussionComment
+}
+
+"""
+Autogenerated input type of UpdateTeamDiscussion
+"""
+input UpdateTeamDiscussionInput {
+  """
+  The updated text of the discussion.
+  """
+  body: String
+
+  """
+  The current version of the body content. If provided, this update operation
+  will be rejected if the given version does not match the latest version on the server.
+  """
+  bodyVersion: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussion"])
+
+  """
+  If provided, sets the pinned state of the updated discussion.
+  """
+  pinned: Boolean
+
+  """
+  The updated title of the discussion.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateTeamDiscussion
+"""
+type UpdateTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated discussion.
+  """
+  teamDiscussion: TeamDiscussion
+}
+
+"""
+Autogenerated input type of UpdateTeamReviewAssignment
+"""
+input UpdateTeamReviewAssignmentInput @preview(toggledBy: "stone-crop-preview") {
+  """
+  The algorithm to use for review assignment
+  """
+  algorithm: TeamReviewAssignmentAlgorithm = ROUND_ROBIN
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Turn on or off review assignment
+  """
+  enabled: Boolean!
+
+  """
+  An array of team member IDs to exclude
+  """
+  excludedTeamMemberIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The Node ID of the team to update review assignments of
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Notify the entire team of the PR if it is delegated
+  """
+  notifyTeam: Boolean = true
+
+  """
+  The number of team members to assign
+  """
+  teamMemberCount: Int = 1
+}
+
+"""
+Autogenerated return type of UpdateTeamReviewAssignment
+"""
+type UpdateTeamReviewAssignmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team that was modified
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of UpdateTeamsRepository
+"""
+input UpdateTeamsRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Permission that should be granted to the teams.
+  """
+  permission: RepositoryPermission!
+
+  """
+  Repository ID being granted access to.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  A list of teams being granted access. Limit: 10
+  """
+  teamIds: [ID!]! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of UpdateTeamsRepository
+"""
+type UpdateTeamsRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was updated.
+  """
+  repository: Repository
+
+  """
+  The teams granted permission on the repository.
+  """
+  teams: [Team!]
+}
+
+"""
+Autogenerated input type of UpdateTopics
+"""
+input UpdateTopicsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  An array of topic names.
+  """
+  topicNames: [String!]!
+}
+
+"""
+Autogenerated return type of UpdateTopics
+"""
+type UpdateTopicsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Names of the provided topics that are not valid.
+  """
+  invalidTopicNames: [String!]
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+A user is an individual's account on GitHub that owns repositories and can make new content.
+"""
+type User implements Actor & Node & PackageOwner & ProfileOwner & ProjectOwner & ProjectV2Owner & ProjectV2Recent & RepositoryDiscussionAuthor & RepositoryDiscussionCommentAuthor & RepositoryOwner & Sponsorable & UniformResourceLocatable {
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  A URL pointing to the user's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The user's public profile bio.
+  """
+  bio: String
+
+  """
+  The user's public profile bio as HTML.
+  """
+  bioHTML: HTML!
+
+  """
+  Could this user receive email notifications, if the organization had notification restrictions enabled?
+  """
+  canReceiveOrganizationEmailsWhenNotificationsRestricted(
+    """
+    The login of the organization to check.
+    """
+    login: String!
+  ): Boolean!
+
+  """
+  A list of commit comments made by this user.
+  """
+  commitComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The user's public profile company.
+  """
+  company: String
+
+  """
+  The user's public profile company as HTML.
+  """
+  companyHTML: HTML!
+
+  """
+  The collection of contributions this user has made to different repositories.
+  """
+  contributionsCollection(
+    """
+    Only contributions made at this time or later will be counted. If omitted, defaults to a year ago.
+    """
+    from: DateTime
+
+    """
+    The ID of the organization used to filter contributions.
+    """
+    organizationID: ID
+
+    """
+    Only contributions made before and up to (including) this time will be
+    counted. If omitted, defaults to the current time or one year from the
+    provided from argument.
+    """
+    to: DateTime
+  ): ContributionsCollection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The user's publicly visible profile email.
+  """
+  email: String!
+
+  """
+  A list of enterprises that the user belongs to.
+  """
+  enterprises(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter enterprises returned based on the user's membership type.
+    """
+    membershipType: EnterpriseMembershipType = ALL
+
+    """
+    Ordering options for the User's enterprises.
+    """
+    orderBy: EnterpriseOrder = {field: NAME, direction: ASC}
+  ): EnterpriseConnection
+
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  A list of users the given user is followed by.
+  """
+  followers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): FollowerConnection!
+
+  """
+  A list of users the given user is following.
+  """
+  following(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): FollowingConnection!
+
+  """
+  Find gist by repo name.
+  """
+  gist(
+    """
+    The gist name to find.
+    """
+    name: String!
+  ): Gist
+
+  """
+  A list of gist comments made by this user.
+  """
+  gistComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GistCommentConnection!
+
+  """
+  A list of the Gists the user has created.
+  """
+  gists(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for gists returned from the connection
+    """
+    orderBy: GistOrder
+
+    """
+    Filters Gists according to privacy.
+    """
+    privacy: GistPrivacy
+  ): GistConnection!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  The hovercard information for this user in a given context
+  """
+  hovercard(
+    """
+    The ID of the subject to get the hovercard in the context of
+    """
+    primarySubjectId: ID
+  ): Hovercard!
+
+  """
+  The Node ID of the User object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this user.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  Whether or not this user is a participant in the GitHub Security Bug Bounty.
+  """
+  isBountyHunter: Boolean!
+
+  """
+  Whether or not this user is a participant in the GitHub Campus Experts Program.
+  """
+  isCampusExpert: Boolean!
+
+  """
+  Whether or not this user is a GitHub Developer Program member.
+  """
+  isDeveloperProgramMember: Boolean!
+
+  """
+  Whether or not this user is a GitHub employee.
+  """
+  isEmployee: Boolean!
+
+  """
+  Whether or not this user is following the viewer. Inverse of viewerIsFollowing
+  """
+  isFollowingViewer: Boolean!
+
+  """
+  Whether or not this user is a member of the GitHub Stars Program.
+  """
+  isGitHubStar: Boolean!
+
+  """
+  Whether or not the user has marked themselves as for hire.
+  """
+  isHireable: Boolean!
+
+  """
+  Whether or not this user is a site administrator.
+  """
+  isSiteAdmin: Boolean!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  Whether or not this user is the viewing user.
+  """
+  isViewer: Boolean!
+
+  """
+  A list of issue comments made by this user.
+  """
+  issueComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  A list of issues associated with this user.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  either curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The user's public profile location.
+  """
+  location: String
+
+  """
+  The username used to login.
+  """
+  login: String!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  The user's public profile name.
+  """
+  name: String
+
+  """
+  Find an organization by its login that the user belongs to.
+  """
+  organization(
+    """
+    The login of the organization to find.
+    """
+    login: String!
+  ): Organization
+
+  """
+  Verified email addresses that match verified domains for a specified organization the user is a member of.
+  """
+  organizationVerifiedDomainEmails(
+    """
+    The login of the organization to match verified domains from.
+    """
+    login: String!
+  ): [String!]!
+
+  """
+  A list of organizations the user belongs to.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the User's organizations.
+    """
+    orderBy: OrganizationOrder = null
+  ): OrganizationConnection!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing user's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing user's projects
+  """
+  projectsUrl: URI!
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  The user's profile pronouns
+  """
+  pronouns: String
+
+  """
+  A list of public keys associated with this user.
+  """
+  publicKeys(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PublicKeyConnection!
+
+  """
+  A list of pull requests associated with this user.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  A list of repositories that the user recently contributed to.
+  """
+  repositoriesContributedTo(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    If non-null, include only the specified types of contributions. The
+    GitHub.com UI uses [COMMIT, ISSUE, PULL_REQUEST, REPOSITORY]
+    """
+    contributionTypes: [RepositoryContributionType]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssues: Boolean
+
+    """
+    If true, include user repositories
+    """
+    includeUserRepositories: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only those that have been answered or not. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  The HTTP path for this user
+  """
+  resourcePath: URI!
+
+  """
+  Replies this user has saved
+  """
+  savedReplies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The field to order saved replies by.
+    """
+    orderBy: SavedReplyOrder = {field: UPDATED_AT, direction: DESC}
+  ): SavedReplyConnection
+
+  """
+  The user's social media accounts, ordered as they appear on the user's profile.
+  """
+  socialAccounts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SocialAccountConnection!
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  Repositories the user has starred.
+  """
+  starredRepositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+
+    """
+    Filters starred repositories to only return repositories owned by the viewer.
+    """
+    ownedByViewer: Boolean
+  ): StarredRepositoryConnection!
+
+  """
+  The user's description of what they're currently doing.
+  """
+  status: UserStatus
+
+  """
+  Repositories the user has contributed to, ordered by contribution rank, plus repositories the user has created
+  """
+  topRepositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder!
+
+    """
+    How far back in time to fetch contributed repositories
+    """
+    since: DateTime
+  ): RepositoryConnection!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  The user's Twitter username.
+  """
+  twitterUsername: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this user
+  """
+  url: URI!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Whether or not the viewer is able to follow the user.
+  """
+  viewerCanFollow: Boolean!
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  Whether or not this user is followed by the viewer. Inverse of isFollowingViewer.
+  """
+  viewerIsFollowing: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+
+  """
+  A list of repositories the given user is watching.
+  """
+  watching(
+    """
+    Affiliation options for repositories returned from the connection. If none
+    specified, the results will include repositories for which the current
+    viewer is an owner or collaborator, or member.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  A URL pointing to the user's public website/blog.
+  """
+  websiteUrl: URI
+}
+
+"""
+The possible durations that a user can be blocked for.
+"""
+enum UserBlockDuration {
+  """
+  The user was blocked for 1 day
+  """
+  ONE_DAY
+
+  """
+  The user was blocked for 30 days
+  """
+  ONE_MONTH
+
+  """
+  The user was blocked for 7 days
+  """
+  ONE_WEEK
+
+  """
+  The user was blocked permanently
+  """
+  PERMANENT
+
+  """
+  The user was blocked for 3 days
+  """
+  THREE_DAYS
+}
+
+"""
+Represents a 'user_blocked' event on a given user.
+"""
+type UserBlockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Number of days that the user was blocked for.
+  """
+  blockDuration: UserBlockDuration!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UserBlockedEvent object
+  """
+  id: ID!
+
+  """
+  The user who was blocked.
+  """
+  subject: User
+}
+
+"""
+The connection type for User.
+"""
+type UserConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edit on user content
+"""
+type UserContentEdit implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the date and time when the object was deleted.
+  """
+  deletedAt: DateTime
+
+  """
+  The actor who deleted this content
+  """
+  deletedBy: Actor
+
+  """
+  A summary of the changes for this edit
+  """
+  diff: String
+
+  """
+  When this content was edited
+  """
+  editedAt: DateTime!
+
+  """
+  The actor who edited this content
+  """
+  editor: Actor
+
+  """
+  The Node ID of the UserContentEdit object
+  """
+  id: ID!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+A list of edits to content.
+"""
+type UserContentEditConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserContentEditEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [UserContentEdit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type UserContentEditEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: UserContentEdit
+}
+
+"""
+Represents a user.
+"""
+type UserEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+}
+
+"""
+Email attributes from External Identity
+"""
+type UserEmailMetadata {
+  """
+  Boolean to identify primary emails
+  """
+  primary: Boolean
+
+  """
+  Type of email
+  """
+  type: String
+
+  """
+  Email id
+  """
+  value: String!
+}
+
+"""
+The user's description of what they're currently doing.
+"""
+type UserStatus implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  An emoji summarizing the user's status.
+  """
+  emoji: String
+
+  """
+  The status emoji as HTML.
+  """
+  emojiHTML: HTML
+
+  """
+  If set, the status will not be shown after this date.
+  """
+  expiresAt: DateTime
+
+  """
+  The Node ID of the UserStatus object
+  """
+  id: ID!
+
+  """
+  Whether this status indicates the user is not fully available on GitHub.
+  """
+  indicatesLimitedAvailability: Boolean!
+
+  """
+  A brief message describing what the user is doing.
+  """
+  message: String
+
+  """
+  The organization whose members can see this status. If null, this status is publicly visible.
+  """
+  organization: Organization
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user who has this status.
+  """
+  user: User!
+}
+
+"""
+The connection type for UserStatus.
+"""
+type UserStatusConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserStatusEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [UserStatus]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type UserStatusEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: UserStatus
+}
+
+"""
+Ordering options for user status connections.
+"""
+input UserStatusOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user statuses by.
+  """
+  field: UserStatusOrderField!
+}
+
+"""
+Properties by which user status connections can be ordered.
+"""
+enum UserStatusOrderField {
+  """
+  Order user statuses by when they were updated.
+  """
+  UPDATED_AT
+}
+
+"""
+A domain that can be verified or approved for an organization or an enterprise.
+"""
+type VerifiableDomain implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The DNS host name that should be used for verification.
+  """
+  dnsHostName: URI
+
+  """
+  The unicode encoded domain.
+  """
+  domain: URI!
+
+  """
+  Whether a TXT record for verification with the expected host name was found.
+  """
+  hasFoundHostName: Boolean!
+
+  """
+  Whether a TXT record for verification with the expected verification token was found.
+  """
+  hasFoundVerificationToken: Boolean!
+
+  """
+  The Node ID of the VerifiableDomain object
+  """
+  id: ID!
+
+  """
+  Whether or not the domain is approved.
+  """
+  isApproved: Boolean!
+
+  """
+  Whether this domain is required to exist for an organization or enterprise policy to be enforced.
+  """
+  isRequiredForPolicyEnforcement: Boolean!
+
+  """
+  Whether or not the domain is verified.
+  """
+  isVerified: Boolean!
+
+  """
+  The owner of the domain.
+  """
+  owner: VerifiableDomainOwner!
+
+  """
+  The punycode encoded domain.
+  """
+  punycodeEncodedDomain: URI!
+
+  """
+  The time that the current verification token will expire.
+  """
+  tokenExpirationTime: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The current verification token for the domain.
+  """
+  verificationToken: String
+}
+
+"""
+The connection type for VerifiableDomain.
+"""
+type VerifiableDomainConnection {
+  """
+  A list of edges.
+  """
+  edges: [VerifiableDomainEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [VerifiableDomain]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type VerifiableDomainEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: VerifiableDomain
+}
+
+"""
+Ordering options for verifiable domain connections.
+"""
+input VerifiableDomainOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order verifiable domains by.
+  """
+  field: VerifiableDomainOrderField!
+}
+
+"""
+Properties by which verifiable domain connections can be ordered.
+"""
+enum VerifiableDomainOrderField {
+  """
+  Order verifiable domains by their creation date.
+  """
+  CREATED_AT
+
+  """
+  Order verifiable domains by the domain name.
+  """
+  DOMAIN
+}
+
+"""
+Types that can own a verifiable domain.
+"""
+union VerifiableDomainOwner = Enterprise | Organization
+
+"""
+Autogenerated input type of VerifyVerifiableDomain
+"""
+input VerifyVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to verify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of VerifyVerifiableDomain
+"""
+type VerifyVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was verified.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+A hovercard context with a message describing how the viewer is related.
+"""
+type ViewerHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Identifies the user who is related to this context.
+  """
+  viewer: User!
+}
+
+"""
+A subject that may be upvoted.
+"""
+interface Votable {
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+}
+
+"""
+A workflow contains meta information about an Actions workflow file.
+"""
+type Workflow implements Node & UniformResourceLocatable {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Workflow object
+  """
+  id: ID!
+
+  """
+  The name of the workflow.
+  """
+  name: String!
+
+  """
+  The HTTP path for this workflow
+  """
+  resourcePath: URI!
+
+  """
+  The runs of the workflow.
+  """
+  runs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the connection
+    """
+    orderBy: WorkflowRunOrder = {field: CREATED_AT, direction: DESC}
+  ): WorkflowRunConnection!
+
+  """
+  The state of the workflow.
+  """
+  state: WorkflowState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this workflow
+  """
+  url: URI!
+}
+
+"""
+A workflow that must run for this rule to pass
+"""
+type WorkflowFileReference {
+  """
+  The path to the workflow file
+  """
+  path: String!
+
+  """
+  The ref (branch or tag) of the workflow file to use
+  """
+  ref: String
+
+  """
+  The ID of the repository where the workflow is defined
+  """
+  repositoryId: Int!
+
+  """
+  The commit SHA of the workflow file to use
+  """
+  sha: String
+}
+
+"""
+A workflow that must run for this rule to pass
+"""
+input WorkflowFileReferenceInput {
+  """
+  The path to the workflow file
+  """
+  path: String!
+
+  """
+  The ref (branch or tag) of the workflow file to use
+  """
+  ref: String
+
+  """
+  The ID of the repository where the workflow is defined
+  """
+  repositoryId: Int!
+
+  """
+  The commit SHA of the workflow file to use
+  """
+  sha: String
+}
+
+"""
+A workflow run.
+"""
+type WorkflowRun implements Node & UniformResourceLocatable {
+  """
+  The check suite this workflow run belongs to.
+  """
+  checkSuite: CheckSuite!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The log of deployment reviews
+  """
+  deploymentReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewConnection!
+
+  """
+  The event that triggered the workflow run
+  """
+  event: String!
+
+  """
+  The workflow file
+  """
+  file: WorkflowRunFile
+
+  """
+  The Node ID of the WorkflowRun object
+  """
+  id: ID!
+
+  """
+  The pending deployment requests of all check runs in this workflow run
+  """
+  pendingDeploymentRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentRequestConnection!
+
+  """
+  The HTTP path for this workflow run
+  """
+  resourcePath: URI!
+
+  """
+  A number that uniquely identifies this workflow run in its parent workflow.
+  """
+  runNumber: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this workflow run
+  """
+  url: URI!
+
+  """
+  The workflow executed in this workflow run.
+  """
+  workflow: Workflow!
+}
+
+"""
+The connection type for WorkflowRun.
+"""
+type WorkflowRunConnection {
+  """
+  A list of edges.
+  """
+  edges: [WorkflowRunEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [WorkflowRun]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type WorkflowRunEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: WorkflowRun
+}
+
+"""
+An executed workflow file for a workflow run.
+"""
+type WorkflowRunFile implements Node & UniformResourceLocatable {
+  """
+  The Node ID of the WorkflowRunFile object
+  """
+  id: ID!
+
+  """
+  The path of the workflow file relative to its repository.
+  """
+  path: String!
+
+  """
+  The direct link to the file in the repository which stores the workflow file.
+  """
+  repositoryFileUrl: URI!
+
+  """
+  The repository name and owner which stores the workflow file.
+  """
+  repositoryName: URI!
+
+  """
+  The HTTP path for this workflow run file
+  """
+  resourcePath: URI!
+
+  """
+  The parent workflow run execution for this file.
+  """
+  run: WorkflowRun!
+
+  """
+  The HTTP URL for this workflow run file
+  """
+  url: URI!
+
+  """
+  If the viewer has permissions to push to the repository which stores the workflow.
+  """
+  viewerCanPushRepository: Boolean!
+
+  """
+  If the viewer has permissions to read the repository which stores the workflow.
+  """
+  viewerCanReadRepository: Boolean!
+}
+
+"""
+Ways in which lists of workflow runs can be ordered upon return.
+"""
+input WorkflowRunOrder {
+  """
+  The direction in which to order workflow runs by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order workflows.
+  """
+  field: WorkflowRunOrderField!
+}
+
+"""
+Properties by which workflow run connections can be ordered.
+"""
+enum WorkflowRunOrderField {
+  """
+  Order workflow runs by most recently created
+  """
+  CREATED_AT
+}
+
+"""
+The possible states for a workflow.
+"""
+enum WorkflowState {
+  """
+  The workflow is active.
+  """
+  ACTIVE
+
+  """
+  The workflow was deleted from the git repository.
+  """
+  DELETED
+
+  """
+  The workflow was disabled by default on a fork.
+  """
+  DISABLED_FORK
+
+  """
+  The workflow was disabled for inactivity in the repository.
+  """
+  DISABLED_INACTIVITY
+
+  """
+  The workflow was disabled manually.
+  """
+  DISABLED_MANUALLY
+}
+
+"""
+Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+"""
+type WorkflowsParameters {
+  """
+  Workflows that must pass for this rule to pass.
+  """
+  workflows: [WorkflowFileReference!]!
+}
+
+"""
+Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+"""
+input WorkflowsParametersInput {
+  """
+  Workflows that must pass for this rule to pass.
+  """
+  workflows: [WorkflowFileReferenceInput!]!
+}
+
+"""
+A valid x509 certificate string
+"""
+scalar X509Certificate
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/simple-star-wars.graphql 0.34.0-1/tests/data/graphql/simple-star-wars.graphql
--- 0.26.4-3/tests/data/graphql/simple-star-wars.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/simple-star-wars.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,142 @@
+type Person {
+    id: ID!
+    name: String!
+    height: Int
+    mass: Int
+    hair_color: String
+    skin_color: String
+    eye_color: String
+    birth_year: String
+    gender: String
+
+    # Relationships
+    homeworld_id: ID
+    homeworld: Planet
+    species: [Species!]!
+    species_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+    starships: [Starship!]!
+    starships_ids: [ID!]!
+    vehicles: [Vehicle!]!
+    vehicles_ids: [ID!]!
+}
+
+type Planet {
+    id: ID!
+    name: String!
+    rotation_period: String
+    orbital_period: String
+    diameter: String
+    climate: String
+    gravity: String
+    terrain: String
+    surface_water: String
+    population: String
+
+    # Relationships
+    residents: [Person!]!
+    residents_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Species {
+    id: ID!
+    name: String!
+    classification: String
+    designation: String
+    average_height: String
+    skin_colors: String
+    hair_colors: String
+    eye_colors: String
+    average_lifespan: String
+    language: String
+
+    # Relationships
+    people: [Person!]!
+    people_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Vehicle {
+    id: ID!
+    name: String!
+    model: String
+    manufacturer: String
+    cost_in_credits: String
+    length: String
+    max_atmosphering_speed: String
+    crew: String
+    passengers: String
+    cargo_capacity: String
+    consumables: String
+    vehicle_class: String
+
+    # Relationships
+    pilots: [Person!]!
+    pilots_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Starship {
+    id: ID!
+    name: String!
+    model: String
+    manufacturer: String
+    cost_in_credits: String
+    length: String
+    max_atmosphering_speed: String
+    crew: String
+    passengers: String
+    cargo_capacity: String
+    consumables: String
+    hyperdrive_rating: String
+    MGLT: String
+    starship_class: String
+
+    # Relationships
+    pilots: [Person!]!
+    pilots_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Film {
+  id: ID!
+  title: String!
+  episode_id: Int!
+  opening_crawl: String!
+  director: String!
+  producer: String
+  release_date: String!
+
+  # Relationships
+  characters: [Person!]!
+  characters_ids: [ID!]!
+  planets: [Planet!]!
+  planets_ids: [ID!]!
+  starships: [Starship!]!
+  starships_ids: [ID!]!
+  vehicles: [Vehicle!]!
+  vehicles_ids: [ID!]!
+  species: [Species!]!
+  species_ids: [ID!]!
+}
+
+type Query {
+  planet(id: ID!): Planet
+  listPlanets(page: Int): [Planet!]!
+  person(id: ID!): Person
+  listPeople(page: Int): [Person!]!
+  species(id: ID!): Species
+  listSpecies(page: Int): [Species!]!
+  film(id: ID!): Film
+  listFilms(page: Int): [Film!]!
+  starship(id: ID!): Starship
+  listStarships(page: Int): [Starship!]!
+  vehicle(id: ID!): Vehicle
+  listVehicles(page: Int): [Vehicle!]!
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/union-aliased-bug.graphql 0.34.0-1/tests/data/graphql/union-aliased-bug.graphql
--- 0.26.4-3/tests/data/graphql/union-aliased-bug.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/union-aliased-bug.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+type GroupMetadata {
+  name: String!
+}
+
+type UserMetadata {
+  name: String!
+}
+
+union Metadata = UserMetadata | GroupMetadata
+
+type Resource {
+  metadata: UserMetadata!
+}
diff -pruN 0.26.4-3/tests/data/graphql/union-commented.graphql 0.34.0-1/tests/data/graphql/union-commented.graphql
--- 0.26.4-3/tests/data/graphql/union-commented.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/union-commented.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+"""This is a test comment in a single line"""
+type GroupMetadata {
+  name: String!
+}
+
+"""This is a multiline comment,
+with a line break,
+and a line break
+"""
+type UserMetadata {
+  name: String!
+}
+
+"""This is another multiline comment,
+with a line break,
+and another line break
+"""
+union Metadata = UserMetadata | GroupMetadata
+
+"""This is a single line comment"""
+union DummyMetadata = UserMetadata | GroupMetadata
+
+type Resource {
+  metadata: UserMetadata!
+}
diff -pruN 0.26.4-3/tests/data/graphql/union.graphql 0.34.0-1/tests/data/graphql/union.graphql
--- 0.26.4-3/tests/data/graphql/union.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/union.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+interface IResource {
+    id: ID!
+}
+
+type Employee implements IResource {
+    id: ID!
+    firstName: String
+    lastName: String
+}
+
+type Car implements IResource {
+    id: ID!
+    passengerCapacity: Int!
+}
+
+union Resource = Employee | Car
+
+union TechnicalResource = Car
diff -pruN 0.26.4-3/tests/data/graphql/use-standard-collections.graphql 0.34.0-1/tests/data/graphql/use-standard-collections.graphql
--- 0.26.4-3/tests/data/graphql/use-standard-collections.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/use-standard-collections.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+type A {
+    field: String!
+    listField: [String!]!
+    listListField:[[String!]!]!
+}
+
diff -pruN 0.26.4-3/tests/data/graphql/use-union-operator.graphql 0.34.0-1/tests/data/graphql/use-union-operator.graphql
--- 0.26.4-3/tests/data/graphql/use-union-operator.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/graphql/use-union-operator.graphql	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+type A {
+    field: String!
+    optionalField: String
+    optionalListOptionalField: [String]
+    listOptionalField: [String]!
+    listField: [String!]!
+    optionalListListOptionalField:[[String]!]
+    listListOptionalField:[[String]!]!
+    listOptionalListOptionalField:[[String]]!
+    optionalListOptionalListField:[[String!]]
+    optionalListListField:[[String!]!]
+    listListField:[[String!]!]!
+    listOptionalListField:[[String!]]!
+}
diff -pruN 0.26.4-3/tests/data/json/array_include_null.json 0.34.0-1/tests/data/json/array_include_null.json
--- 0.26.4-3/tests/data/json/array_include_null.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/array_include_null.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "items": [
+    {
+      "oofield": null
+    },
+    {
+      "oofield": [1, 2, 3]
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/broken.json 0.34.0-1/tests/data/json/broken.json
--- 0.26.4-3/tests/data/json/broken.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/broken.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,2 @@
+{
+  "Pet": {
diff -pruN 0.26.4-3/tests/data/json/duplicate_models.json 0.34.0-1/tests/data/json/duplicate_models.json
--- 0.26.4-3/tests/data/json/duplicate_models.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/duplicate_models.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+    "Arm Right": {
+        "Joint 1": 5,
+        "Joint 2": 3,
+        "Joint 3": 66
+    },
+    "Arm Left": {
+        "Joint 1": 55,
+        "Joint 2": 13,
+        "Joint 3": 6
+    },
+    "Head": {
+        "Joint 1": 10
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/pet.json 0.34.0-1/tests/data/json/pet.json
--- 0.26.4-3/tests/data/json/pet.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/pet.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "Pet": {
+    "name": "dog",
+    "age": 2
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/simple.json 0.34.0-1/tests/data/json/simple.json
--- 0.26.4-3/tests/data/json/simple.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/simple.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1 @@
+{"petName": "Lady"}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/snake_case.json 0.34.0-1/tests/data/json/snake_case.json
--- 0.26.4-3/tests/data/json/snake_case.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/snake_case.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "snake_case": "Snake case",
+    "camelCase": "Camel case",
+    "kebab-case": "Kebab case",
+    "PascalCase": "Pascal case",
+    "UPPER_CASE": "Upper case",
+    "Dev_Info": "example 1",
+    "CLONE_Device": "example 2"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/space_and_special_characters.json 0.34.0-1/tests/data/json/space_and_special_characters.json
--- 0.26.4-3/tests/data/json/space_and_special_characters.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/json/space_and_special_characters.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "Serial Number": "A12345678",
+    "Timestamp": "2020-05-26T12:15:25.792741Z",
+    "Data": {
+        "Length (m)": 12.34,
+        "Symmetric deviation (%)": 12.216564148290807,
+        "Total running time (s)": 974,
+        "Mass (kg)": 42.23,
+        "Initial parameters": {
+            "V1": 123,
+            "V2": 456
+        },
+        "class": "Unknown"
+    },
+    "values": {
+        "1 Step": "42",
+        "2 Step": "23"
+    },
+    "recursive": {
+        "sub": {
+            "recursive": {
+                "value": 42.23
+            }
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_any_of/direct.json 0.34.0-1/tests/data/jsonschema/all_of_any_of/direct.json
--- 0.26.4-3/tests/data/jsonschema/all_of_any_of/direct.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_any_of/direct.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Target": {
+      "allOf": [
+        {
+          "anyOf": [
+            {
+              "type": "object",
+              "properties": {
+                "first": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "first"
+              ]
+            },
+            {
+              "type": "object",
+              "properties": {
+                "second": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "second"
+              ]
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_any_of/reference.json 0.34.0-1/tests/data/jsonschema/all_of_any_of/reference.json
--- 0.26.4-3/tests/data/jsonschema/all_of_any_of/reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_any_of/reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "First": {
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    "Second": {
+      "properties": {
+        "second": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "second"
+      ]
+    },
+    "Target": {
+      "allOf": [
+        {
+          "anyOf": [
+            {
+              "$ref": "#/definitions/First"
+            },
+            {
+              "$ref": "#/definitions/Second"
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_default.json 0.34.0-1/tests/data/jsonschema/all_of_default.json
--- 0.26.4-3/tests/data/jsonschema/all_of_default.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_default.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+{
+    "type": "object",
+    "title": "Item",
+    "allOf": [{
+            "title": "Entity",
+            "type": "object"
+    }],
+    "required": [
+        "test",
+        "testarray"
+    ],
+    "properties": {
+        "test": {
+            "type": "string",
+            "default": "test123"
+        },
+        "testarray": {
+            "title": "test array",
+            "type": "array",
+            "items": {
+                "type": "string"
+            },
+            "minItems": 1,
+            "default": [
+                "test123"
+            ]
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_one_of/direct.json 0.34.0-1/tests/data/jsonschema/all_of_one_of/direct.json
--- 0.26.4-3/tests/data/jsonschema/all_of_one_of/direct.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_one_of/direct.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Target": {
+      "allOf": [
+        {
+          "oneOf": [
+            {
+              "type": "object",
+              "properties": {
+                "first": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "first"
+              ]
+            },
+            {
+              "type": "object",
+              "properties": {
+                "second": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "second"
+              ]
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_one_of/reference.json 0.34.0-1/tests/data/jsonschema/all_of_one_of/reference.json
--- 0.26.4-3/tests/data/jsonschema/all_of_one_of/reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_one_of/reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "First": {
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    "Second": {
+      "properties": {
+        "second": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "second"
+      ]
+    },
+    "Target": {
+      "allOf": [
+        {
+          "oneOf": [
+            {
+              "$ref": "#/definitions/First"
+            },
+            {
+              "$ref": "#/definitions/Second"
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref/base_test.json 0.34.0-1/tests/data/jsonschema/all_of_ref/base_test.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref/base_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_ref/base_test.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "base_test.json",
+  "description": "base test",
+  "type": "object",
+  "definitions": {
+    "first": {
+      "type": "object",
+      "required": [
+        "second"
+      ],
+      "properties": {
+        "second": {
+          "type": "string",
+          "description": "Second",
+          "examples": [
+            "second"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref/test.json 0.34.0-1/tests/data/jsonschema/all_of_ref/test.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref/test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_ref/test.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test",
+  "description": "test",
+  "type": "object",
+  "allOf": [
+    {
+      "$ref": "base_test.json#/definitions/first"
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref_self.json 0.34.0-1/tests/data/jsonschema/all_of_ref_self.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref_self.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_ref_self.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/all_of_ref_self.json",
+  "type": "object",
+  "properties": {
+    "version": {
+      "allOf": [
+        { "$ref": "#/$defs/version" }
+      ]
+    }
+  },
+  "$defs": {
+    "version": {
+      "type": "null"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_with_object.json 0.34.0-1/tests/data/jsonschema/all_of_with_object.json
--- 0.26.4-3/tests/data/jsonschema/all_of_with_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/all_of_with_object.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,63 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "allOf": [
+    {
+      "$ref": "#/definitions/Home"
+    },
+    {
+      "$ref": "#/definitions/Kind"
+    },
+    {
+      "$ref": "#/definitions/Id"
+    },
+    {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  ],
+  "type": [
+    "object"
+  ],
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    }
+  },
+  "definitions": {
+    "Home": {
+      "type": "object",
+      "properties": {
+        "address": {
+          "type": "string"
+        },
+        "zip": {
+          "type": "string"
+        }
+      }
+    },
+    "Kind": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string"
+        }
+      }
+    },
+    "Id": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "integer"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/array_field_constraints.json 0.34.0-1/tests/data/jsonschema/array_field_constraints.json
--- 0.26.4-3/tests/data/jsonschema/array_field_constraints.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/array_field_constraints.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "TestSchema",
+  "description": "For the test",
+  "properties": {
+    "numbers": {
+      "type": "array",
+      "description": "A list of numbers",
+      "items": {
+        "type": "string",
+        "pattern": "^\\d{1,15}$",
+        "description": "Just a number",
+        "examples": [
+          "1",
+          "5464446",
+          "684572369854259"
+        ]
+      }
+    }
+  },
+  "required": [
+    "numbers"
+  ]
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/array_in_additional_properties.json 0.34.0-1/tests/data/jsonschema/array_in_additional_properties.json
--- 0.26.4-3/tests/data/jsonschema/array_in_additional_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/array_in_additional_properties.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "https://example.com/person.schema.json",
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "title": "my JSON of list of string",
+  "type": "object",
+  "additionalProperties": {
+    "type": "array",
+    "items": [
+      {
+        "type": "string"
+      }
+    ]
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/boolean_property.json 0.34.0-1/tests/data/jsonschema/boolean_property.json
--- 0.26.4-3/tests/data/jsonschema/boolean_property.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/boolean_property.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "type": "object",
+  "properties": {
+    "field": true
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/circular_reference.json 0.34.0-1/tests/data/jsonschema/circular_reference.json
--- 0.26.4-3/tests/data/jsonschema/circular_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/circular_reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Member",
+  "$ref": "#/definitions/user",
+  "definitions": {
+    "user": {
+      "title": "User",
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "pet": {
+          "$ref": "#/definitions/animal"
+        },
+        "home": {
+          "$ref": "#/definitions/house"
+        }
+      }
+    },
+    "animal": {
+      "title": "Animal",
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "breeder": {
+          "$ref": "#/definitions/user"
+        },
+        "home": {
+          "$ref": "#/definitions/house"
+        }
+      }
+    },
+    "house": {
+      "title": "House",
+      "type": "object",
+      "properties": {
+        "address": {
+          "type": "string"
+        },
+        "owner": {
+          "$ref": "#/definitions/user"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/combine_any_of_object.json 0.34.0-1/tests/data/jsonschema/combine_any_of_object.json
--- 0.26.4-3/tests/data/jsonschema/combine_any_of_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/combine_any_of_object.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "title": "My schema",
+    "additionalProperties": true,
+    "properties": {
+        "AddressLine1": { "type": "string" },
+        "AddressLine2": { "type": "string" },
+        "City":         { "type": "string" }
+    },
+    "required": [ "AddressLine1" ],
+    "anyOf": [
+        {
+            "type": "object",
+            "properties": {
+                "State":   { "type": "string" },
+                "ZipCode": { "type": "string" }
+            },
+            "required": [ "ZipCode" ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        },
+        { "$ref": "#/definitions/US" }
+    ],
+    "definitions": {
+        "US":  {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/combine_one_of_object.json 0.34.0-1/tests/data/jsonschema/combine_one_of_object.json
--- 0.26.4-3/tests/data/jsonschema/combine_one_of_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/combine_one_of_object.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "title": "My schema",
+    "additionalProperties": true,
+    "properties": {
+        "AddressLine1": { "type": "string" },
+        "AddressLine2": { "type": "string" },
+        "City":         { "type": "string" }
+    },
+    "required": [ "AddressLine1" ],
+    "oneOf": [
+        {
+            "type": "object",
+            "properties": {
+                "State":   { "type": "string" },
+                "ZipCode": { "type": "string" }
+            },
+            "required": [ "ZipCode" ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        },
+        { "$ref": "#/definitions/US" }
+    ],
+    "definitions": {
+        "US":  {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/combined_array.json 0.34.0-1/tests/data/jsonschema/combined_array.json
--- 0.26.4-3/tests/data/jsonschema/combined_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/combined_array.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,209 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": [
+    "object",
+    "array"
+  ],
+  "items": {
+    "$ref": "#"
+  },
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    }
+  },
+  "definitions": {
+    "CombinedEnum": {
+      "type": [
+        "array",
+        "string"
+      ],
+      "items": {
+        "$ref": "#/definitions/Kind"
+      },
+      "enum": [
+        "green",
+        "red"
+      ]
+    },
+    "CombinedAllOf": {
+      "type": [
+        "array"
+      ],
+      "items": {
+        "$ref": "#/definitions/Kind"
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/Kind"
+        },
+        {
+          "$ref": "#/definitions/Id"
+        },
+        {
+          "$ref": "#/definitions/CustomRootModel"
+        }
+      ]
+    },
+    "CombinedObjectField": {
+      "type": "object",
+      "properties": {
+        "CombinedEnumField": {
+          "type": [
+            "array",
+            "string"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "enum": [
+            "green",
+            "red"
+          ]
+        },
+        "CombinedAllOfField": {
+          "type": [
+            "array"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "allOf": [
+            {
+              "$ref": "#/definitions/Kind"
+            },
+            {
+              "$ref": "#/definitions/Id"
+            },
+            {
+              "$ref": "#/definitions/CustomRootModel"
+            }
+          ]
+        },
+        "CombinedObjectField": {
+          "type": [
+            "array",
+            "object"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "properties": {
+            "color": {
+              "type": "string"
+            }
+          }
+        },
+        "CombinedAllOfObjectField": {
+          "type": [
+            "array",
+            "object"
+          ],
+          "properties": {
+            "color": {
+              "type": "string"
+            }
+          },
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "allOf": [
+            {
+              "$ref": "#/definitions/Kind"
+            },
+            {
+              "$ref": "#/definitions/Id"
+            },
+            {
+              "$ref": "#/definitions/CustomRootModel"
+            }
+          ]
+        }
+      }
+    },
+    "CombinedSelf": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelf"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      }
+    },
+    "CombinedSelfEnum": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelfEnum"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      },
+      "enum": [
+        "green",
+        "red"
+      ]
+    },
+    "CombinedSelfAllOf": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelfAllOf"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      },
+      "enum": [
+        "green",
+        "red"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/Kind"
+        },
+        {
+          "$ref": "#/definitions/Id"
+        },
+        {
+          "$ref": "#/definitions/CustomRootModel"
+        }
+      ]
+    },
+    "Kind": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string"
+        }
+      }
+    },
+    "Id": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "integer"
+        }
+      }
+    },
+    "CustomRootModel": {
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complex_any_of.json 0.34.0-1/tests/data/jsonschema/complex_any_of.json
--- 0.26.4-3/tests/data/jsonschema/complex_any_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/complex_any_of.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,46 @@
+{
+  "$schema": "http://json-schema.org/draft/2019-09/schema#",
+  "anyOf": [
+    {
+      "type": "integer",
+      "$comment": "int"
+    },
+    {
+      "type": "array",
+      "items": {
+        "type": "object",
+        "properties": {
+          "key": {
+            "type": "object",
+            "properties": {
+              "address": {
+                "type": "string",
+                "$comment": "address"
+              },
+              "nat": {
+                "type": "string",
+                "$comment": "nat"
+              }
+            },
+            "required": [
+              "address",
+              "nat"
+            ],
+            "additionalProperties": false,
+            "$comment": "pair"
+          },
+          "value": {
+            "type": "string",
+            "$comment": "nat"
+          }
+        },
+        "required": [
+          "key",
+          "value"
+        ],
+        "additionalProperties": false
+      }
+    }
+  ],
+  "$comment": "big_map"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complex_one_of.json 0.34.0-1/tests/data/jsonschema/complex_one_of.json
--- 0.26.4-3/tests/data/jsonschema/complex_one_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/complex_one_of.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,46 @@
+{
+  "$schema": "http://json-schema.org/draft/2019-09/schema#",
+  "oneOf": [
+    {
+      "type": "integer",
+      "$comment": "int"
+    },
+    {
+      "type": "array",
+      "items": {
+        "type": "object",
+        "properties": {
+          "key": {
+            "type": "object",
+            "properties": {
+              "address": {
+                "type": "string",
+                "$comment": "address"
+              },
+              "nat": {
+                "type": "string",
+                "$comment": "nat"
+              }
+            },
+            "required": [
+              "address",
+              "nat"
+            ],
+            "additionalProperties": false,
+            "$comment": "pair"
+          },
+          "value": {
+            "type": "string",
+            "$comment": "nat"
+          }
+        },
+        "required": [
+          "key",
+          "value"
+        ],
+        "additionalProperties": false
+      }
+    }
+  ],
+  "$comment": "big_map"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complicated_enum.json 0.34.0-1/tests/data/jsonschema/complicated_enum.json
--- 0.26.4-3/tests/data/jsonschema/complicated_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/complicated_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,39 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "ProcessingStatus",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {"type": "string"}
+  },
+  "title": "ProcessingTask",
+  "type": "object",
+  "properties": {
+    "processing_status_union": {
+      "allOf": [
+        { "type": "string"},
+        {
+          "$ref": "#/definitions/ProcessingStatus"
+        }
+      ],
+      "default": "COMPLETED"
+    },
+    "processing_status": {
+      "$ref": "#/definitions/ProcessingStatus",
+      "default": "COMPLETED"
+    },
+    "name": {
+      "type": "string"
+    },
+    "kind": {
+      "$ref": "#/definitions/kind"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/const.json 0.34.0-1/tests/data/jsonschema/const.json
--- 0.26.4-3/tests/data/jsonschema/const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/const.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Const",
+  "properties": {
+    "foo": {
+      "const": "foo"
+    }
+  },
+  "required": ["foo"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/custom_base_path.json 0.34.0-1/tests/data/jsonschema/custom_base_path.json
--- 0.26.4-3/tests/data/jsonschema/custom_base_path.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/custom_base_path.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,59 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "House",
+  "type": "object",
+  "customBasePath": "custom.models.Property",
+  "properties": {
+    "address": {
+      "type": "string"
+    },
+    "owner": {
+      "type": "object",
+      "customBasePath": "custom.models.Person",
+      "properties": {
+        "job": {
+          "type": "string"
+        },
+        "spouse": {
+          "type": "object",
+          "customBasePath": "custom.models.Person",
+          "properties": {
+            "job": {
+              "type": "string"
+            }
+          }
+        },
+        "children": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "customBasePath": "custom.models.Person",
+            "properties": {
+              "school": {
+                "type": "string"
+              },
+              "grade": {
+                "type": "number"
+              },
+              "pets": {
+                "type": "array",
+                "items": {
+                  "type": "object",
+                  "customBasePath": "custom.models.Animal",
+                  "properties": {
+                    "name": {
+                      "type": "string"
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "required": [
+    "address"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/custom_type_path.json 0.34.0-1/tests/data/jsonschema/custom_type_path.json
--- 0.26.4-3/tests/data/jsonschema/custom_type_path.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/custom_type_path.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name.",
+      "customTypePath": "custom.TitleString"
+    },
+    "lastName": {
+      "type": "string",
+      "description": "The person's last name.",
+      "customTypePath": "custom.special.UpperString"
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0,
+      "customTypePath": "custom.special.numbers.Age"
+    },
+    "friends": {
+      "type": "array",
+      "customTypePath": "custom.collection.array.Friends"
+    },
+    "comment": {
+      "type": "null",
+      "customTypePath": "custom.MultipleLineString"
+    }
+  },
+  "definitions": {
+    "RootedCustomType": {
+      "type": "string",
+      "customTypePath": "custom.SpecialString"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/datetime.json 0.34.0-1/tests/data/jsonschema/datetime.json
--- 0.26.4-3/tests/data/jsonschema/datetime.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/datetime.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "birthDay": {
+          "type": "string",
+          "format": "date-time",
+          "example": "2016-08-29T09:12:33.001Z"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_literals.json 0.34.0-1/tests/data/jsonschema/discriminator_literals.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_literals.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_literals.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,62 @@
+{
+  "$defs": {
+    "Type1": {
+      "properties": {
+        "type_": {
+          "const": "a",
+          "default": "a",
+          "title": "Type "
+        }
+      },
+      "title": "Type1",
+      "type": "object"
+    },
+    "Type2": {
+      "properties": {
+        "type_": {
+          "const": "b",
+          "default": "b",
+          "title": "Type "
+        }
+      },
+      "title": "Type2",
+      "type": "object"
+    },
+    "UnrelatedType": {
+      "properties": {
+        "info": {
+          "default": "Unrelated type, not involved in the discriminated union",
+          "title": "A way to check for side effects",
+          "type": "string"
+        }
+      },
+      "title": "UnrelatedType",
+      "type": "object"
+    }
+  },
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "#/$defs/Type1",
+          "b": "#/$defs/Type2"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "#/$defs/Type1"
+        },
+        {
+          "$ref": "#/$defs/Type2"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_no_mapping.json 0.34.0-1/tests/data/jsonschema/discriminator_no_mapping.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_no_mapping.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_no_mapping.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,47 @@
+{
+  "$defs": {
+    "Cat": {
+      "properties": {
+        "pet_type": {
+          "const": "cat"
+        }
+      },
+      "required": [
+        "pet_type"
+      ],
+      "type": "object"
+    },
+    "Dog": {
+      "properties": {
+        "pet_type": {
+          "const": "dog"
+        }
+      },
+      "required": [
+        "pet_type"
+      ],
+      "type": "object"
+    }
+  },
+  "properties": {
+    "pet": {
+      "discriminator": {
+        "propertyName": "pet_type"
+      },
+      "oneOf": [
+        {
+          "$ref": "#/$defs/Cat"
+        },
+        {
+          "$ref": "#/$defs/Dog"
+        }
+      ],
+      "title": "Pet"
+    }
+  },
+  "required": [
+    "pet"
+  ],
+  "title": "Animal",
+  "type": "object"
+} 
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "a",
+      "default": "a",
+      "title": "Type "
+    }
+  },
+  "title": "Type1",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+{
+  "$def": {
+    "Type3": {
+      "properties": {
+        "type_": {
+          "const": "c",
+          "default": "c",
+          "title": "Type "
+        }
+      },
+      "title": "Type3",
+      "type": "object"
+    }
+  },
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "./artificial_folder/type-1.json",
+          "b": "./type-2.json",
+          "c": "#/$def/Type3",
+          "d": "../type-4.json",
+          "e": "../subfolder/type-5.json"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "./artificial_folder/type-1.json"
+        },
+        {
+          "$ref": "./type-2.json"
+        },
+        {
+          "$ref": "#/$def/Type3"
+        },
+        {
+          "$ref": "../type-4.json"
+        },
+        {
+          "$ref": "../subfolder/type-5.json"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "properties": {
+    "type_": {
+      "const": "b",
+      "default": "b",
+      "title": "Type "
+    },
+    "ref_type": {
+      "$ref": "./artificial_folder/type-1.json",
+      "description": "A referenced type."
+    }
+  },
+  "title": "Type2",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "e",
+      "default": "e",
+      "title": "Type "
+    }
+  },
+  "title": "Type5",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/type-4.json 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/type-4.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/type-4.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/discriminator_with_external_reference/type-4.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "d",
+      "default": "d",
+      "title": "Type "
+    }
+  },
+  "title": "Type4",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_enum.json 0.34.0-1/tests/data/jsonschema/duplicate_enum.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duplicate_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "title": "User",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "animal": {
+      "type": "string",
+      "enum": [
+        "dog",
+        "cat",
+        "snake"
+      ],
+      "default": "dog"
+    },
+    "pet": {
+      "type": "string",
+      "enum": [
+        "dog",
+        "cat",
+        "snake"
+      ],
+      "default": "cat"
+    },
+    "redistribute": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "enum": [
+          "static",
+          "connected"
+        ]
+      }
+    }
+  },
+  "definitions": {
+    "redistribute": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "enum": [
+          "static",
+          "connected"
+        ]
+      },
+      "description": "Redistribute type for routes."
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/common.yaml 0.34.0-1/tests/data/jsonschema/duplicate_field_constraints/common.yaml
--- 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/common.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duplicate_field_constraints/common.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+---
+$schema: https://json-schema.org/draft/2020-12/schema
+$id: common.yaml
+definitions:
+  ulid:
+    type: string
+    pattern: '[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}'
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/test.yaml 0.34.0-1/tests/data/jsonschema/duplicate_field_constraints/test.yaml
--- 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/test.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duplicate_field_constraints/test.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+---
+$schema: https://json-schema.org/draft/2020-12/schema
+$id: test.yaml
+title: test
+required:
+  - uid
+properties:
+  uid:
+    description: ulid of this object
+    $ref: ./common.yaml#/definitions/ulid
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_name/bar.json 0.34.0-1/tests/data/jsonschema/duplicate_name/bar.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_name/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duplicate_name/bar.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Bar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_name/foo.json 0.34.0-1/tests/data/jsonschema/duplicate_name/foo.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_name/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duplicate_name/foo.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duration.json 0.34.0-1/tests/data/jsonschema/duration.json
--- 0.26.4-3/tests/data/jsonschema/duration.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/duration.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Test": {
+      "type": "object",
+      "properties": {
+        "s_duration": {
+          "type": "string",
+          "format": "duration",
+          "example": "PT2H33M3S"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/enum_in_root/enum_in_root.json 0.34.0-1/tests/data/jsonschema/enum_in_root/enum_in_root.json
--- 0.26.4-3/tests/data/jsonschema/enum_in_root/enum_in_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/enum_in_root/enum_in_root.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "orderReference": {
+      "type": "string",
+      "examples": [
+        "27378669"
+      ],
+      "description": "Reference number of the order"
+    },
+    "brand": {
+       "$ref": "./schema.json#/$defs/brand",
+      "$id": "#root/brand"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/enum_in_root/schema.json 0.34.0-1/tests/data/jsonschema/enum_in_root/schema.json
--- 0.26.4-3/tests/data/jsonschema/enum_in_root/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/enum_in_root/schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "$defs": {
+    "brand": {
+      "type": "string",
+      "enum": [
+        "OPUS",
+        "someday"
+      ],
+      "description": "purchased brand"
+    }
+  }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child.json 0.34.0-1/tests/data/jsonschema/external_child.json
--- 0.26.4-3/tests/data/jsonschema/external_child.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_child.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "Json": {
+    "type": "object",
+    "properties": {
+      "firstName": {
+        "type": "string"
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child.yaml 0.34.0-1/tests/data/jsonschema/external_child.yaml
--- 0.26.4-3/tests/data/jsonschema/external_child.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_child.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+Yaml:
+  type: "object"
+  properties:
+    firstName:
+      type: "string"
+
+
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child_root.json 0.34.0-1/tests/data/jsonschema/external_child_root.json
--- 0.26.4-3/tests/data/jsonschema/external_child_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_child_root.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "Object",
+  "properties": {
+    "somefield": {
+      "type": "integer"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_collapse/child.json 0.34.0-1/tests/data/jsonschema/external_collapse/child.json
--- 0.26.4-3/tests/data/jsonschema/external_collapse/child.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_collapse/child.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+{
+    "$schema": "http://json-schema.org/draft/2019-09/schema#",
+    "type": "string"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_collapse/parent.json 0.34.0-1/tests/data/jsonschema/external_collapse/parent.json
--- 0.26.4-3/tests/data/jsonschema/external_collapse/parent.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_collapse/parent.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+    "$schema": "http://json-schema.org/draft/2019-09/schema#",
+    "properties": {
+        "item":  {"$ref": "child.json"}
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_definitions.json 0.34.0-1/tests/data/jsonschema/external_definitions.json
--- 0.26.4-3/tests/data/jsonschema/external_definitions.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_definitions.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "elegantName": {
+        "type": "string",
+        "minLength": 3
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_definitions_root.json 0.34.0-1/tests/data/jsonschema/external_definitions_root.json
--- 0.26.4-3/tests/data/jsonschema/external_definitions_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_definitions_root.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "name": {
+            "$ref": "external_definitions.json#/elegantName"
+        }
+    },
+    "required": [
+        "name"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Coffee",
+  "type": "string",
+    "enum": [
+        "Black",
+        "Espresso"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drinnk/tea.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Tea",
+  "type": "string",
+    "enum": [
+        "Oolong",
+        "Green"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/food.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/food.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/food.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/food.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drink/food.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "noodle": {
+      "type": "string",
+      "enum": [
+        "ramen",
+        "spaghetti"
+      ]
+    },
+    "soup": {
+      "type": "string",
+      "enum": [
+        "bean",
+        "mushroom",
+        "tomato"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/friends.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/friends.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/friends.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/friends.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/friends.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Friends",
+  "type": "array",
+  "items": {
+    "properties": {
+      "name": {
+        "type": "string",
+        "example": "John Doe"
+      },
+      "phone_number": {
+        "type": "string",
+        "example": "(555) 555-1234"
+      },
+     "food": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "food.json#/definitions/noodle"
+        },
+        {
+          "$ref": "food.json#/definitions/soup"
+        }
+      ]
+    }
+    },
+    "additionalProperties": true,
+    "required": [
+      "name"
+    ]
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Robot",
+  "type": "object",
+  "allOf": [{ "$ref": "../relative/animal/pet/pet.json"}],
+  "properties": {
+    "friends": {
+     "$ref": "../../person.json"
+    },
+    "drink": {
+      "$ref": "../drink/coffee.json#"
+    },
+    "food": {
+      "$ref": "../food.json#/definitions/noodle"
+    },
+    "pet": {
+      "$ref": "../relative/animal/pet/pet.json"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Fur",
+  "type": "string",
+    "enum": [
+        "Short hair",
+        "Long hair"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    },
+    "fur": {
+      "$ref": "../fur.json"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/person.json 0.34.0-1/tests/data/jsonschema/external_files_in_directory/person.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_files_in_directory/person.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,64 @@
+{
+  "$id": "https://example.com/external_files_in_directory/person.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "first_name": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "last_name": {
+      "type": "string",
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "pets": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/relative/animal/pet/pet.json#"
+        }
+      ]
+    },
+    "friends": {
+      "$ref": "definitions/friends.json#"
+    },
+    "robot": {
+      "$ref": "./definitions/machine/robot.json"
+    },
+    "comment": {
+      "type": "null"
+    },
+    "drink": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/drink/coffee.json"
+        },
+        {
+          "$ref": "definitions/drink/tea.json#/"
+        }
+      ]
+    },
+    "food": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/food.json#/definitions/noodle"
+        },
+        {
+          "$ref": "definitions/food.json#/definitions/soup"
+        }
+      ]
+    }
+  },
+  "required": [
+      "first_name",
+      "last_name"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_parent.json 0.34.0-1/tests/data/jsonschema/external_parent.json
--- 0.26.4-3/tests/data/jsonschema/external_parent.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_parent.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "childYaml": {
+      "$ref": "external_child.yaml#/Yaml"
+    },
+    "childJson": {
+      "$ref": "external_child.json#/Json"
+    },
+    "childDuplicateJson": {
+      "$ref": "external_child.json#/Json"
+    },
+    "childLocal": {
+      "$ref": "#Local"
+    },
+    "nested": {
+        "items": [
+            {
+              "childLocal": {
+                "$ref": "#Local"
+              }
+            }
+        ]
+    }
+  },
+  "items": {
+    "childLocal": {
+           "$ref": "#Local"
+    }
+  },
+  "additionalProperties": {
+        "childLocal": {
+           "$ref": "#Local"
+    }
+  },
+  "anyOf": [
+    {    "childLocal": {
+           "$ref": "#Local"
+        }
+    }
+  ],
+  "allOf": [
+    {    "childLocal": {
+           "$ref": "#Local"
+        }
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_parent_root.json 0.34.0-1/tests/data/jsonschema/external_parent_root.json
--- 0.26.4-3/tests/data/jsonschema/external_parent_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_parent_root.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "Object",
+  "properties": {
+    "metadata": {
+      "$ref": "external_child_root.json#/"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/other/ref1.json 0.34.0-1/tests/data/jsonschema/external_reference/other/ref1.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/other/ref1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_reference/other/ref1.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "anyOf": [
+        {
+            "$ref": "ref2.json#/"
+        },
+        {"type": "null"}
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/other/ref2.json 0.34.0-1/tests/data/jsonschema/external_reference/other/ref2.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/other/ref2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_reference/other/ref2.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "title": "Other",
+    "properties": {
+        "key": {"type": "string"}
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref0.json 0.34.0-1/tests/data/jsonschema/external_reference/ref0.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref0.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_reference/ref0.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "properties": {
+        "ref1": {
+            "$ref": "ref1.json#/"
+        },
+        "other_ref1": {
+            "$ref": "other/ref1.json#/"
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref1.json 0.34.0-1/tests/data/jsonschema/external_reference/ref1.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_reference/ref1.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "anyOf": [
+        {
+            "$ref": "ref2.json#/"
+        },
+        {"type": "null"}
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref2.json 0.34.0-1/tests/data/jsonschema/external_reference/ref2.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/external_reference/ref2.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "properties": {
+        "key": {"type": "string"}
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/extra_data_msgspec.json 0.34.0-1/tests/data/jsonschema/extra_data_msgspec.json
--- 0.26.4-3/tests/data/jsonschema/extra_data_msgspec.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/extra_data_msgspec.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "#all#": {
+        "base_class_kwargs": {
+            "omit_defaults": true
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/extra_fields.json 0.34.0-1/tests/data/jsonschema/extra_fields.json
--- 0.26.4-3/tests/data/jsonschema/extra_fields.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/extra_fields.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,36 @@
+{
+  "title": "Test",
+  "type": "object",
+  "required": [
+    "foo"
+  ],
+  "properties": {
+    "foo": {
+      "type": "object",
+      "properties": {
+        "x": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": true
+    },
+    "bar": {
+      "type": "object",
+      "properties": {
+        "y": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false
+    },
+    "baz": {
+      "type": "object",
+      "properties": {
+        "z": {
+          "type": "integer"
+        }
+      }
+    }
+  },
+  "additionalProperties": false
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/extras.json 0.34.0-1/tests/data/jsonschema/extras.json
--- 0.26.4-3/tests/data/jsonschema/extras.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/extras.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Extras",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "normal key",
+      "key1": 123,
+      "key2": 456,
+      "$exclude": 123,
+      "invalid-key-1": "abc",
+      "-invalid+key_2": "efg",
+      "$comment": "comment",
+      "$id": "#name",
+      "register": "hij",
+      "schema": "klm",
+      "x-repr": true,
+      "x-abc": true,
+      "example": "example",
+      "readOnly": true
+    },
+    "age": {
+      "type": "integer",
+      "example": 12,
+      "writeOnly": true,
+      "examples": [
+        13,
+        20
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/field_has_same_name.json 0.34.0-1/tests/data/jsonschema/field_has_same_name.json
--- 0.26.4-3/tests/data/jsonschema/field_has_same_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/field_has_same_name.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "title": "Test",
+  "type": "object",
+  "properties": {
+    "TestObject": {
+      "title": "TestObject",
+      "type": "object",
+      "properties": {
+        "test_string": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/commons.json 0.34.0-1/tests/data/jsonschema/forwarding_reference/commons.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/commons.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/forwarding_reference/commons.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "customArray": { "type": "array" }
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/forwarding.json 0.34.0-1/tests/data/jsonschema/forwarding_reference/forwarding.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/forwarding.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/forwarding_reference/forwarding.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "forwardingArray": {"$ref": "commons.json#/$defs/customArray"}
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/schema.json 0.34.0-1/tests/data/jsonschema/forwarding_reference/schema.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/forwarding_reference/schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "pets": {"$ref": "forwarding.json#/$defs/forwardingArray"}
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/has_default_value.json 0.34.0-1/tests/data/jsonschema/has_default_value.json
--- 0.26.4-3/tests/data/jsonschema/has_default_value.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/has_default_value.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,78 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "definitions": {
+    "teamType": {
+      "description": "Person team",
+      "type": "string",
+      "enum": [
+        "Department",
+        "Division",
+        "BusinessUnit",
+        "Organization"
+      ],
+      "default": "Department"
+    },
+    "ID": {
+      "type": "string",
+      "default": "abc"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      },
+      "default": {
+        "name": "ken"
+      }
+    },
+    "Family": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/ID"
+      },
+      "default": [
+        "abc",
+        "efg"
+      ]
+    },
+    "FamilyPets": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/Pet"
+      },
+      "default": [
+        "taro",
+        "shiro"
+      ]
+    }
+  },
+  "properties": {
+    "id": {
+      "$ref": "#/definitions/ID"
+    },
+    "user": {
+      "$ref": "#/definitions/Pet"
+    },
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "team": {
+      "$ref": "#/definitions/teamType"
+    },
+    "anotherTeam": {
+      "$ref": "#/definitions/teamType",
+      "default": "Department"
+    },
+    "Family": {
+      "$ref": "#/definitions/Family"
+    },
+    "FamilyPets": {
+      "$ref": "#/definitions/FamilyPets"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/id.json 0.34.0-1/tests/data/jsonschema/id.json
--- 0.26.4-3/tests/data/jsonschema/id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/id.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+
+  "definitions": {
+    "address": {
+      "$id": "#address",
+      "type": "object",
+      "properties": {
+        "street_address": { "type": "string" },
+        "city":           { "type": "string" },
+        "state":          { "type": "string" }
+      },
+      "required": ["street_address", "city", "state"]
+    }
+  },
+
+  "type": "object",
+
+  "properties": {
+    "billing_address": { "$ref": "#address" },
+    "shipping_address": { "$ref": "#address" }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/ContactPoint.schema.json 0.34.0-1/tests/data/jsonschema/ids/ContactPoint.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/ContactPoint.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/ContactPoint.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,29 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "ContactPoint.schema.json",
+    "title": "ContactPoint",
+    "description": "A contact point—for example, a Customer Complaints department.",
+    "type": "object",
+    "required": [
+        "type",
+        "email"
+    ],
+    "properties": {
+        "type": {
+            "$ref": "type.schema.json",
+            "enum": [
+                "ContactPoint"
+            ]
+        },
+        "contactType": {
+            "type": "string"
+        },
+        "email": {
+            "type": "string",
+            "format": "email"
+        },
+        "telephone": {
+            "type": "string"
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/Organization.schema.json 0.34.0-1/tests/data/jsonschema/ids/Organization.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/Organization.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/Organization.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "Organization.schema.json",
+    "title": "Organization",
+    "description": "An organization such as a school, NGO, corporation, club, etc.",
+    "type": "object",
+    "required": [
+        "type",
+        "name"
+    ],
+    "properties": {
+        "id": {
+            "$ref": "id.schema.json"
+        },
+        "type": {
+            "$ref": "type.schema.json",
+            "enum": [
+                "Organization"
+            ]
+        },
+        "name": {
+            "$ref": "name.schema.json"
+        },
+        "contactPoint": {
+            "$ref": "ContactPoint.schema.json"
+        },
+        "sameAs": {
+            "$ref": "sameAs.schema.json"
+        },
+        "url": {
+            "$ref": "URI.schema.json"
+        }
+    },
+    "examples": [
+        {
+            "id": "https://ror.org/02a809t02",
+            "type": "Organization",
+            "name": "Vizzuality",
+            "sameAs": "https://ror.org/02a809t02"
+        }
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/URI.schema.json 0.34.0-1/tests/data/jsonschema/ids/URI.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/URI.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/URI.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,8 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "URI.schema.json",
+    "title": "URI",
+    "description": "String representing a URI.",
+    "type": "string",
+    "format": "uri"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/context.schema.json 0.34.0-1/tests/data/jsonschema/ids/context.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/context.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/context.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "context.schema.json",
+    "title": "context",
+    "description": "A URL that provides descriptions of this objects properties. TODO: Align with full JSON-LD context definition!",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/id.schema.json 0.34.0-1/tests/data/jsonschema/ids/id.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/id.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/id.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "id.schema.json",
+    "title": "id",
+    "description": "Identifier string of this object.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/name.schema.json 0.34.0-1/tests/data/jsonschema/ids/name.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/name.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/name.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "name.schema.json",
+    "title": "name",
+    "description": "A descriptive (full) name of the entity. For example, a dataset called 'Snow depth in the Northern Hemisphere' or a person called 'Sarah L. Jones' or a place called 'The Empire States Building'. Use unique names for distinct entities whenever possible.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/sameAs.schema.json 0.34.0-1/tests/data/jsonschema/ids/sameAs.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/sameAs.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/sameAs.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "sameAs.schema.json",
+    "title": "sameAs",
+    "description": "Use the sameAs property to indicate the most canonical URLs for the original in cases of the entity. For example this may be a link to the original metadata of a dataset, definition of a property, Person, Organization or Place.",
+    "$ref": "URI.schema.json"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/schema.schema.json 0.34.0-1/tests/data/jsonschema/ids/schema.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/schema.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/schema.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "schema.schema.json",
+    "title": "schema",
+    "description": "URI of the JSON schema of this object.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/type.schema.json 0.34.0-1/tests/data/jsonschema/ids/type.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/type.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/ids/type.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "type.schema.json",
+    "type": "string",
+    "title": "type",
+    "description": "Type of this object."
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/imports_correct/schema.json 0.34.0-1/tests/data/jsonschema/imports_correct/schema.json
--- 0.26.4-3/tests/data/jsonschema/imports_correct/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/imports_correct/schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "./type_1.json",
+          "A": "./type_1.json"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "./type_1.json"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/imports_correct/type_1.json 0.34.0-1/tests/data/jsonschema/imports_correct/type_1.json
--- 0.26.4-3/tests/data/jsonschema/imports_correct/type_1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/imports_correct/type_1.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "properties": {
+    "type_": {
+      "default": "a",
+      "enum": ["a", "A"],
+
+      "type": "string",
+      "title": "Type"
+    }
+  },
+  "title": "Type1",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/inheritance_forward_ref.json 0.34.0-1/tests/data/jsonschema/inheritance_forward_ref.json
--- 0.26.4-3/tests/data/jsonschema/inheritance_forward_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/inheritance_forward_ref.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,100 @@
+{
+    "title": "PersonsBestFriend",
+    "description": "This is the main model.",
+    "type": "object",
+    "properties": {
+      "people": {
+        "title": "People",
+        "type": "array",
+        "items": {
+          "$ref": "#/definitions/Person"
+        }
+      },
+      "dogs": {
+        "title": "Dogs",
+        "type": "array",
+        "items": {
+          "$ref": "#/definitions/Dog"
+        }
+      },
+      "dog_base": {
+        "$ref": "#/definitions/DogBase"
+      },
+      "dog_relationships": {
+        "$ref": "#/definitions/DogRelationships"
+      },
+      "person_base": {
+        "$ref": "#/definitions/PersonBase"
+      },
+      "person_relationships": {
+        "$ref": "#/definitions/PersonRelationships"
+      }
+    },
+    "definitions": {
+      "Person": {
+        "title": "Person",
+        "allOf": [
+            {"$ref": "#/definitions/PersonBase"},
+            {"$ref": "#/definitions/PersonRelationships"}
+        ]
+      },
+      "Dog": {
+        "title": "Dog",
+        "allOf": [
+            {"$ref": "#/definitions/DogBase"},
+            {"$ref": "#/definitions/DogRelationships"}
+        ]
+      },
+      "DogBase": {
+        "title": "DogBase",
+        "type": "object",
+        "properties": {
+          "name": {
+            "title": "Name",
+            "type": "string"
+          },
+          "woof": {
+            "title": "Woof",
+            "default": true,
+            "type": "boolean"
+          }
+        }
+      },
+      "DogRelationships": {
+        "title": "DogRelationships",
+        "type": "object",
+        "properties": {
+          "people": {
+            "title": "People",
+            "type": "array",
+            "items": {
+              "$ref": "#/definitions/Person"
+            }
+          }
+        }
+      },
+      "PersonBase": {
+        "title": "PersonBase",
+        "type": "object",
+        "properties": {
+          "name": {
+            "title": "Name",
+            "type": "string"
+          }
+        }
+      },
+      "PersonRelationships": {
+        "title": "PersonRelationships",
+        "type": "object",
+        "properties": {
+          "people": {
+            "title": "People",
+            "type": "array",
+            "items": {
+              "$ref": "#/definitions/Person"
+            }
+          }
+        }
+      }
+    }
+  }
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_enum_name.json 0.34.0-1/tests/data/jsonschema/invalid_enum_name.json
--- 0.26.4-3/tests/data/jsonschema/invalid_enum_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/invalid_enum_name.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "InvalidEnum",
+  "type": "string",
+  "enum": [
+    "1 value",
+    " space",
+    "*- special",
+    "schema",
+    "MRO",
+    "mro"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_import_name/array-commons.schema.json 0.34.0-1/tests/data/jsonschema/invalid_import_name/array-commons.schema.json
--- 0.26.4-3/tests/data/jsonschema/invalid_import_name/array-commons.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/invalid_import_name/array-commons.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/array-commons.schema.json",
+  "title": "Commons",
+  "description": "Commons objects",
+  "$defs": {
+    "defaultArray": {
+      "type": "array",
+      "minLength": 1,
+      "maxLength": 100
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_import_name/products.schema.json 0.34.0-1/tests/data/jsonschema/invalid_import_name/products.schema.json
--- 0.26.4-3/tests/data/jsonschema/invalid_import_name/products.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/invalid_import_name/products.schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/products.schema.json",
+  "title": "Products",
+  "description": "The products in the catalog",
+  "$ref": "array-commons.schema.json#/$defs/defaultArray"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_model_name.json 0.34.0-1/tests/data/jsonschema/invalid_model_name.json
--- 0.26.4-3/tests/data/jsonschema/invalid_model_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/invalid_model_name.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "1 xyz",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "lastName": {
+      "type": "string",
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "friends": {
+      "type": "array"
+    },
+    "comment": {
+      "type": "null"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/items_boolean.json 0.34.0-1/tests/data/jsonschema/items_boolean.json
--- 0.26.4-3/tests/data/jsonschema/items_boolean.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/items_boolean.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "example": {
+      "items": true
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/json_pointer.json 0.34.0-1/tests/data/jsonschema/json_pointer.json
--- 0.26.4-3/tests/data/jsonschema/json_pointer.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/json_pointer.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,72 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "definitions": {
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "hunts",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "bark",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "breed": {
+                        "type": "string"
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/json_pointer_array.json 0.34.0-1/tests/data/jsonschema/json_pointer_array.json
--- 0.26.4-3/tests/data/jsonschema/json_pointer_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/json_pointer_array.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,62 @@
+
+ {
+  "$schema": "https://json-schema.org/draft/2019-09/schema",
+  "oneOf": [
+    {
+      "definitions": {
+        "email": {
+          "properties": {
+            "email": {
+              "format": "email",
+              "type": "string"
+            }
+          },
+          "required": [
+            "email"
+          ],
+          "type": "object"
+        }
+      },
+      "properties": {
+        "emails": {
+          "items": {
+            "$ref": "#/oneOf/0/definitions/email"
+          },
+          "type": "array"
+        }
+      },
+      "required": [
+        "emails"
+      ],
+      "type": "object"
+    },
+    {
+      "definitions": {
+        "error": {
+          "properties": {
+            "code": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "code"
+          ],
+          "type": "object"
+        }
+      },
+      "properties": {
+        "errors": {
+          "items": {
+            "$ref": "#/oneOf/1/definitions/error"
+          },
+          "type": "array"
+        }
+      },
+      "required": [
+        "errors"
+      ],
+      "type": "object"
+    }
+  ],
+  "type": "object"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/long_description.json 0.34.0-1/tests/data/jsonschema/long_description.json
--- 0.26.4-3/tests/data/jsonschema/long_description.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/long_description.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "LongDescription",
+  "type": "object",
+  "properties": {
+    "summary": {
+      "type": "string",
+      "description": "summary for object"
+    },
+    "description": {
+      "type": "string",
+      "description": "datamodel-code-generator. This code generator creates pydantic model from an openapi file and others."
+    },
+    "multi_line": {
+      "description": "datamodel-code-generator\nThis code generator creates pydantic model from an openapi file and others.\n\n\nSupported source types\nOpenAPI 3 (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema Validation)\nJSON/YAML/CSV Data (it will be converted to JSON Schema)\nPython dictionary (it will be converted to JSON Schema)",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/many_case_enum.json 0.34.0-1/tests/data/jsonschema/many_case_enum.json
--- 0.26.4-3/tests/data/jsonschema/many_case_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/many_case_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "string",
+  "enum": [
+    "snake_case",
+    "CAP_CASE",
+    "CamelCase",
+    "UPPERCASE"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/bar.json 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/bar.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/bar.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Bar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string",
+      "enum": ["DEBUG", "INFO", "WARN", "ERROR"],
+      "default": "INFO"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/foo.json 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/foo.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/foo.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "properties": {
+    "loggerLevel": {
+      "$ref": "bar.json#/definitions/logLevels"
+    },
+    "AnotherLoggerLevel": {
+      "$ref": "nested_bar/bar.json#/definitions/logLevels"
+    },
+    "OtherLoggerLevels": {
+      "items": {
+        "$ref": "nested_bar/bar.json#/definitions/logLevels"
+      },
+      "default": ["INFO", "ERROR", "INVALID"]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "nested_bar/bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedBar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels Nested Model",
+      "type": "string",
+      "enum": ["DEBUG", "INFO", "ERROR"],
+      "default": "ERROR"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_a.json 0.34.0-1/tests/data/jsonschema/multiple_files/file_a.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files/file_a.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelA",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelB": {
+      "$ref": "file_b.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_b.json 0.34.0-1/tests/data/jsonschema/multiple_files/file_b.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files/file_b.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelB",
+  "properties": {
+    "metadata": {
+      "type": "string"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_c.json 0.34.0-1/tests/data/jsonschema/multiple_files/file_c.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_c.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files/file_c.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelC",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelB": {
+      "$ref": "file_b.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_d.json 0.34.0-1/tests/data/jsonschema/multiple_files/file_d.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_d.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files/file_d.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelD",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelA": {
+      "$ref": "file_a.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_a.json 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_a.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_a.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "PersonA",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "file_b.json#definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_b.json 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_b.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_b.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "definitions": {
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "hunts",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "bark",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "breed": {
+                        "type": "string"
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_c.json 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_c.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_c.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_c.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "PersonC",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/base_test.json 0.34.0-1/tests/data/jsonschema/multiple_files_self_ref/base_test.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/base_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files_self_ref/base_test.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "base_test.json",
+  "description": "test",
+  "type": "object",
+  "definitions": {
+    "first": {
+      "$ref": "#/definitions/second"
+    },
+    "second": {
+      "type": "string"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/test.json 0.34.0-1/tests/data/jsonschema/multiple_files_self_ref/test.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/multiple_files_self_ref/test.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test.json",
+  "description": "test",
+  "type": "object",
+  "required": [
+    "test_id",
+    "test_ip",
+    "result"
+  ],
+  "properties": {
+    "test_id": {
+      "type": "string",
+      "description": "test ID"
+    },
+    "test_ip": {
+      "$ref": "base_test.json#/definitions/first"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_all_of.json 0.34.0-1/tests/data/jsonschema/nested_all_of.json
--- 0.26.4-3/tests/data/jsonschema/nested_all_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_all_of.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,40 @@
+{
+  "title": "Model",
+  "allOf": [
+    {
+      "type": "object",
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    {
+      "allOf": [
+        {
+          "properties": {
+            "second": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "second"
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_array.json 0.34.0-1/tests/data/jsonschema/nested_array.json
--- 0.26.4-3/tests/data/jsonschema/nested_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_array.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,35 @@
+{
+  "type": "object",
+  "properties": {
+    "bounding_box": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string"
+        },
+        "coordinates": {
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": {
+              "type": "array",
+              "items": [{
+                "type": "number"
+              },
+              {
+                "type": "string"
+              }]
+            }
+          }
+        }
+      },
+      "required": [
+        "coordinates",
+        "type"
+      ]
+    },
+    "attributes": {
+      "type": "object"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_array.json.snapshot 0.34.0-1/tests/data/jsonschema/nested_array.json.snapshot
--- 0.26.4-3/tests/data/jsonschema/nested_array.json.snapshot	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_array.json.snapshot	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,8 @@
+class BoundingBox(BaseModel):
+    type: str
+    coordinates: List[List[List[Union[float, str]]]]
+
+
+class Model(BaseModel):
+    bounding_box: Optional[BoundingBox] = None
+    attributes: Optional[Dict[str, Any]] = None
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_json_pointer.json 0.34.0-1/tests/data/jsonschema/nested_json_pointer.json
--- 0.26.4-3/tests/data/jsonschema/nested_json_pointer.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_json_pointer.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,142 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "definitions": {
+        "CatBreed":{
+            "C1":
+            {
+                "title":"C1",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "C2":
+            {
+                "title":"C2",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            }
+        },
+        "DogBreed":{
+            "D1":
+            {
+                "title":"D1",
+                "type": "object",
+                "properties":
+                {
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "D2":
+            {
+                "title":"D2",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            }
+        },
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "breed": {
+                        "title": "breed",
+                        "type": "object",
+                        "oneOf": [
+                            {
+                                "$ref": "#/definitions/CatBreed/C1"
+                            },
+                            {
+                                "$ref": "#/definitions/CatBreed/C2"
+                            }
+                        ]
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "breed": {
+                        "title": "breed",
+                        "type": "string",
+                        "oneOf": [
+                            {
+                                "$ref": "#/definitions/DogBreed/D1"
+                            },
+                            {
+                                "$ref": "#/definitions/DogBreed/D2"
+                            }
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_person.json 0.34.0-1/tests/data/jsonschema/nested_person.json
--- 0.26.4-3/tests/data/jsonschema/nested_person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_person.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedPerson",
+  "type": "object",
+  "properties": {
+    "nested_deep_childJson": {
+      "$ref": "#/definitions/nested.deep.Json"
+    },
+    "nested_deep_childAnother": {
+      "$ref": "#/definitions/nested.deep.Another"
+    },
+    "empty_parent_nested_deep_childJson": {
+      "$ref": "#/definitions/empty_parent.nested.deep.Json"
+    }
+  },
+  "definitions": {
+    "nested.deep.Json": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    },
+    "nested.deep.Another": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    },
+    "empty_parent.nested.deep.Json": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_skip.json 0.34.0-1/tests/data/jsonschema/nested_skip.json
--- 0.26.4-3/tests/data/jsonschema/nested_skip.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nested_skip.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedSkip",
+  "type": "object",
+  "definitions": {
+    "a.b.c.d.e": {
+      "type": "object",
+      "properties": {
+        "example1": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/not_required_nullable.json 0.34.0-1/tests/data/jsonschema/not_required_nullable.json
--- 0.26.4-3/tests/data/jsonschema/not_required_nullable.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/not_required_nullable.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "null_name": {
+      "type": ["string", "null"]
+    },
+    "age": {
+      "type": "integer"
+    },
+    "null_age": {
+      "type": ["integer", "null"]
+    }
+  },
+  "required": ["name", "null_age"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/null.json 0.34.0-1/tests/data/jsonschema/null.json
--- 0.26.4-3/tests/data/jsonschema/null.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/null.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "type": "object",
+  "properties": {
+    "null": {
+      "type": "null"
+    },
+    "nullableString": {
+      "type": ["null", "string"]
+    },
+    "nullableNumber":{
+      "type": ["null", "number", "integer"]
+    },
+    "any": {
+    },
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/null_and_array.json 0.34.0-1/tests/data/jsonschema/null_and_array.json
--- 0.26.4-3/tests/data/jsonschema/null_and_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/null_and_array.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "$schema": "http://json-schema.org/schema#",
+    "type": "object",
+    "properties": {
+        "my_obj": {
+            "type": "array",
+            "items": {
+                "type": "object",
+                "properties": {
+                    "items": {
+                        "type": [
+                            "array",
+                            "null"
+                        ]
+                    }
+                },
+                "required": [
+                    "items"
+                ]
+            }
+        }
+    },
+    "required": [
+        "my_obj"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nullable_any_of.json 0.34.0-1/tests/data/jsonschema/nullable_any_of.json
--- 0.26.4-3/tests/data/jsonschema/nullable_any_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nullable_any_of.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "type": "object",
+  "additionalProperties": false,
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "id1",
+  "title": "Validating Schema ID1",
+  "properties": {
+    "in": {
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "input_dataset_path": {
+          "type": "string",
+          "minLength": 1,
+          "title": "Path to the input dataset",
+          "description": "d1"
+        },
+        "config": {
+          "anyOf": [
+            {
+              "type": "string",
+              "minLength": 1,
+              "title": "t2",
+              "description": "d2"
+            },
+            {
+              "type": [
+                "null"
+              ],
+              "title": "t3",
+              "description": "d3"
+            }
+          ]
+        }
+      }
+    },
+    "n1": {
+      "type": "integer"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/nullable_object.json 0.34.0-1/tests/data/jsonschema/nullable_object.json
--- 0.26.4-3/tests/data/jsonschema/nullable_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/nullable_object.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "id": "config_schema_v3.7.json",
+  "type": "object",
+  "required": [
+    "networks"
+  ],
+  "properties": {
+    "networks": {
+      "id": "#/properties/networks",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/network"
+        }
+      }
+    }
+  },
+  "patternProperties": {
+    "^x-": {}
+  },
+  "additionalProperties": false,
+  "definitions": {
+    "network": {
+      "id": "#/definitions/network",
+      "type": [
+        "object",
+        "null"
+      ],
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      },
+      "patternProperties": {
+        "^x-": {}
+      },
+      "additionalProperties": false
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/object_has_one_of.json 0.34.0-1/tests/data/jsonschema/object_has_one_of.json
--- 0.26.4-3/tests/data/jsonschema/object_has_one_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/object_has_one_of.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,82 @@
+{
+    "$schema": "https://json-schema.org/draft/2019-09/schema",
+    "type": "object",
+    "title": "v2_test",
+    "additionalProperties": true,
+    "oneOf": [
+        {
+            "type": "object",
+            "properties": {
+                "field_1": {
+                    "enum": [
+                        "response_1"
+                    ]
+                }
+            },
+            "additionalProperties": true,
+            "oneOf": [
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_a"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                }
+            ],
+            "required": [
+                "field_1"
+            ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "field_1": {
+                    "enum": [
+                        "response_2"
+                    ]
+                }
+            },
+            "additionalProperties": true,
+            "oneOf": [
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_b"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                },
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_c"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                }
+            ],
+            "required": [
+                "field_1"
+            ]
+        }
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/one_of_with_sub_schema_array_item.json 0.34.0-1/tests/data/jsonschema/one_of_with_sub_schema_array_item.json
--- 0.26.4-3/tests/data/jsonschema/one_of_with_sub_schema_array_item.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/one_of_with_sub_schema_array_item.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "SpatialPlan",
+  "type": "object",
+  "properties": {
+    "officialDocument": {
+      "title": "officialDocument",
+      "description": "Link to the official documents that relate to the spatial plan.",
+      "oneOf": [
+        {
+          "type": "string"
+        },
+        {
+          "type": "array",
+          "minItems": 1,
+          "items": {
+            "type": "string",
+            "format": "uri"
+          },
+          "uniqueItems": true
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof.json 0.34.0-1/tests/data/jsonschema/oneof.json
--- 0.26.4-3/tests/data/jsonschema/oneof.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/oneof.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+{
+    "properties": {
+        "item": {
+            "properties": {
+                "timeout": {
+                    "oneOf": [
+                        {
+                            "type": "string"
+                        },
+                        {
+                            "type": "integer"
+                        }
+                    ]
+                }
+            },
+            "type": "object"
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof.json.snapshot 0.34.0-1/tests/data/jsonschema/oneof.json.snapshot
--- 0.26.4-3/tests/data/jsonschema/oneof.json.snapshot	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/oneof.json.snapshot	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+class Item(BaseModel):
+    timeout: Optional[Union[str, int]] = None
+
+
+class OnOfObject(BaseModel):
+    item: Optional[Item] = None
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern.json 0.34.0-1/tests/data/jsonschema/pattern.json
--- 0.26.4-3/tests/data/jsonschema/pattern.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/pattern.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Info",
+  "type": "object",
+  "properties": {
+    "hostName": {
+      "type": "string",
+      "format": "hostname"
+    },
+    "arn": {
+      "type": "string",
+      "pattern": "(^arn:([^:]*):([^:]*):([^:]*):(|\\*|[\\d]{12}):(.+)$)|^\\*$"
+    },
+    "tel": {
+      "type": "string",
+      "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$"
+    },
+    "comment": {
+        "type": "string",
+        "pattern": "[^\b\f\n\r\t\\\\a+.?'\"|()]+$"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern_properties.json 0.34.0-1/tests/data/jsonschema/pattern_properties.json
--- 0.26.4-3/tests/data/jsonschema/pattern_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/pattern_properties.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,28 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "required": [
+     "bar"
+  ],
+  "properties": {
+    "bar": {
+      "type": "object",
+      "patternProperties": {
+        "^([a-zA-Z_][a-zA-Z0-9_]*)$": {
+          "$ref": "#/definitions/Bar"
+        }
+      }
+    }
+  },
+  "definitions": {
+    "Bar": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern_properties_by_reference.json 0.34.0-1/tests/data/jsonschema/pattern_properties_by_reference.json
--- 0.26.4-3/tests/data/jsonschema/pattern_properties_by_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/pattern_properties_by_reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,54 @@
+
+{
+  "$defs": {
+    "stt": {
+      "additionalProperties": false,
+      "description": "STT properties allows to configure how the user voice is converted to text",
+      "properties": {
+        "timeout": {
+          "title": "STT Timeout",
+          "type": "number"
+        }
+      },
+      "title": "Speech to text Settings",
+      "type": "object"
+    },
+    "textResponse": {
+      "additionalProperties": false,
+      "type": "object",
+      "patternProperties": {
+        "^[a-z]{1}[0-9]{1}$": {
+          "phoneNumber": {
+            "additionalProperties": false,
+            "type": "number"
+          }
+        }
+      }
+    }
+  },
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "additionalProperties": false,
+  "description": "The root schema comprises the entire JSON document",
+  "properties": {
+    "KeyWithExplicitPatternProperties": {
+      "additionalProperties": false,
+      "type": "object",
+      "patternProperties": {
+        "^[a-z]{1}[0-9]{1}$": {
+          "phoneNumber": {
+            "additionalProperties": false,
+            "type": "number"
+          }
+        }
+      }
+    },
+    "KeyWithPatternPropertiesByReference": {
+      "$ref": "#/$defs/textResponse"
+    },
+    "SomeOtherBoringReference": {
+      "$ref": "#/$defs/stt"
+    }
+  },
+  "title": "SomeSchema Schema",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/person.json 0.34.0-1/tests/data/jsonschema/person.json
--- 0.26.4-3/tests/data/jsonschema/person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/person.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "lastName": {
+      "type": ["string", "null"],
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "friends": {
+      "type": "array"
+    },
+    "comment": {
+      "type": "null"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/require_referenced_field/referenced.json 0.34.0-1/tests/data/jsonschema/require_referenced_field/referenced.json
--- 0.26.4-3/tests/data/jsonschema/require_referenced_field/referenced.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/require_referenced_field/referenced.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "$id": "test",
+    "description": "test",
+    "type": "object",
+    "properties": {
+        "some_optional_property": {
+            "type": "string"
+        },
+        "some_optional_typed_property": {
+            "type": "string",
+            "format":"date-time"
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/require_referenced_field/required.json 0.34.0-1/tests/data/jsonschema/require_referenced_field/required.json
--- 0.26.4-3/tests/data/jsonschema/require_referenced_field/required.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/require_referenced_field/required.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "$id": "test",
+    "description": "test",
+    "type": "object",
+    "allOf": [
+      {
+        "$ref": "referenced.json"
+      }
+    ],
+    "required" : ["some_optional_property", "some_optional_typed_property"]
+  }
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/required_and_any_of_required.json 0.34.0-1/tests/data/jsonschema/required_and_any_of_required.json
--- 0.26.4-3/tests/data/jsonschema/required_and_any_of_required.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/required_and_any_of_required.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "required": [
+    "foo"
+  ],
+  "properties": {
+    "foo": {
+      "type": "object",
+      "required": [
+        "bar"
+      ],
+      "anyOf": [{
+          "required": [
+            "baz"
+          ]
+        },
+        {
+          "required": [
+            "qux"
+          ]
+        }
+      ],
+      "properties": {
+        "bar": {
+          "type": "integer"
+        },
+        "baz": {
+          "type": "integer"
+        },
+        "qux": {
+          "type": "integer"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id.json 0.34.0-1/tests/data/jsonschema/root_id.json
--- 0.26.4-3/tests/data/jsonschema/root_id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_id.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_absolute_url.json 0.34.0-1/tests/data/jsonschema/root_id_absolute_url.json
--- 0.26.4-3/tests/data/jsonschema/root_id_absolute_url.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_id_absolute_url.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_ref.json 0.34.0-1/tests/data/jsonschema/root_id_ref.json
--- 0.26.4-3/tests/data/jsonschema/root_id_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_id_ref.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "root_id.json#/definitions/Person"
+    },
+    "OriginalPerson": {
+      "$ref": "root_id.json#/definitions/OriginalPerson"
+    },
+    "Pet": {
+      "$ref": "root_id.json#/definitions/Pet"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_self_ref.json 0.34.0-1/tests/data/jsonschema/root_id_self_ref.json
--- 0.26.4-3/tests/data/jsonschema/root_id_self_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_id_self_ref.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id_self_ref.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/root_id_self_ref.json#/definitions/Person"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_with_additional_properties.json 0.34.0-1/tests/data/jsonschema/root_model_with_additional_properties.json
--- 0.26.4-3/tests/data/jsonschema/root_model_with_additional_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_model_with_additional_properties.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,127 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test.json",
+  "description": "test",
+  "type": "object",
+  "required": [
+    "test_id",
+    "test_ip",
+    "result",
+    "nested_object_result",
+    "nested_enum_result"
+  ],
+  "properties": {
+    "test_id": {
+      "type": "string",
+      "description": "test ID"
+    },
+    "test_ip": {
+      "type": "string",
+      "description": "test IP"
+    },
+    "result": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "integer"
+      }
+    },
+    "nested_object_result": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "object",
+        "properties": {
+          "status":{
+            "type": "integer"
+          }
+        },
+        "required": ["status"]
+      }
+    },
+    "nested_enum_result": {
+      "type": "object",
+      "additionalProperties": {
+        "enum": ["red", "green"]
+      }
+    },
+    "all_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "allOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "one_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "oneOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "any_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "anyOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "all_of_with_unknown_object" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "allOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "description": "TODO" }
+        ]
+      }
+    },
+    "objectRef": {
+      "type": "object",
+      "additionalProperties": {
+        "$ref": "#/definitions/User"
+      }
+    },
+    "deepNestedObjectRef": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "object",
+        "additionalProperties": {
+          "type": "object",
+          "additionalProperties": {
+             "$ref": "#/definitions/User"
+          }
+        }
+      }
+    }
+  },
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/bar.json 0.34.0-1/tests/data/jsonschema/root_one_of/bar.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_one_of/bar.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "JobRun",
+  "description": "Scheduled Execution Context Configuration.",
+  "type": "object",
+  "properties": {
+    "enabled": {
+      "description": "If Live Execution is Enabled.",
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/foo.json 0.34.0-1/tests/data/jsonschema/root_one_of/foo.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_one_of/foo.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "JobRun",
+  "description": "Live Execution object.",
+  "type": "object",
+  "properties": {
+    "enabled": {
+      "description": "If Live Execution is enabled",
+      "type": "boolean",
+      "default": false
+    },
+    "resources": {
+      "description": "Resource full classname to register to extend any endpoints.",
+      "type": "array",
+      "items": {
+        "type": "string"
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/union.json 0.34.0-1/tests/data/jsonschema/root_one_of/union.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/union.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/root_one_of/union.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$id": "union.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "App",
+  "description": "This schema defines the applications for Open-Metadata.",
+  "type": "object",
+  "definitions": {
+    "executionContext": {
+      "description": "Execution Configuration.",
+      "oneOf": [
+        {
+          "$ref": "./foo.json"
+        },
+        {
+          "$ref": "./bar.json"
+        }
+      ],
+      "additionalProperties": false
+    }
+  },
+  "properties": {
+    "runtime": {
+      "description": "Execution Configuration.",
+      "$ref": "#/definitions/executionContext"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/same_name_objects.json 0.34.0-1/tests/data/jsonschema/same_name_objects.json
--- 0.26.4-3/tests/data/jsonschema/same_name_objects.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/same_name_objects.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "$id": "https://example.com/same_name_objects.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "friends": {
+      "type": "object",
+      "additionalProperties": false
+    },
+    "tst1": {
+        "$ref": "person.json#/properties/friends"
+    },
+    "tst2": {
+        "$ref": "person.json#/properties/friends"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/self_reference.json 0.34.0-1/tests/data/jsonschema/self_reference.json
--- 0.26.4-3/tests/data/jsonschema/self_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/self_reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "friends": {
+          "type": "array",
+          "minItems": 1,
+          "items": [
+            {
+              "$ref": "#/definitions/Pet"
+            }
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/similar_nested_array.json 0.34.0-1/tests/data/jsonschema/similar_nested_array.json
--- 0.26.4-3/tests/data/jsonschema/similar_nested_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/similar_nested_array.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,93 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ObjectA": {
+      "properties": {
+        "data": {
+          "items": {
+            "properties": {
+              "keyA": {
+                "type": "string"
+              }
+            },
+            "type": "object"
+          },
+          "type": "array"
+        }
+      },
+      "type": "object"
+    },
+    "ObjectB": {
+      "properties": {
+        "data": {
+          "items": {
+            "properties": {
+              "keyB": {
+                "type": "string"
+              }
+            },
+            "type": "object"
+          },
+          "type": "array"
+        }
+      },
+      "type": "object"
+    },
+    "ObjectC": {
+      "properties": {
+        "keyC": {
+          "anyOf": [
+              {
+                "type": "object",
+                "properties": {
+                  "nestedA": {
+                    "type": "string"
+                  }
+                }
+              },
+            {
+                "type": "object",
+                "properties": {
+                  "nestedB": {
+                    "type": "string"
+                  }
+                }
+              }
+          ]
+        }
+      }
+    },
+    "ObjectD": {
+      "properties": {
+        "keyC": {
+          "items": [
+              {
+                "type": "object",
+                "properties": {
+                  "nestedA": {
+                    "type": "string"
+                  }
+                }
+              },
+            {
+                "type": "object",
+                "properties": {
+                  "nestedB": {
+                    "type": "string"
+                  }
+                }
+              },
+            {
+                "type": "string",
+                "enum": ["dog", "cat", "snake"]
+              },
+                        {
+                "type": "string",
+                "enum": ["orange", "apple", "milk"]
+              }
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/simple_frozen_test.json 0.34.0-1/tests/data/jsonschema/simple_frozen_test.json
--- 0.26.4-3/tests/data/jsonschema/simple_frozen_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/simple_frozen_test.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "User",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    },
+    "email": {
+      "type": "string",
+      "format": "email"
+    }
+  },
+  "required": ["name", "age"]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/simple_string.json 0.34.0-1/tests/data/jsonschema/simple_string.json
--- 0.26.4-3/tests/data/jsonschema/simple_string.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/simple_string.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "type": "object",
+  "properties": {"s": {"type": ["string"]}},
+  "required": ["s"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/space_field_enum.json 0.34.0-1/tests/data/jsonschema/space_field_enum.json
--- 0.26.4-3/tests/data/jsonschema/space_field_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/space_field_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "SpaceIF": {
+      "$ref": "#/definitions/SpaceIF"
+    }
+  },
+  "definitions": {
+    "SpaceIF": {
+      "type": "string",
+      "enum": [
+        "Space Field"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/special_enum.json 0.34.0-1/tests/data/jsonschema/special_enum.json
--- 0.26.4-3/tests/data/jsonschema/special_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/special_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "string",
+  "enum": [
+    true,
+    false,
+    "",
+    "\n",
+    "\r\n",
+    "\t",
+    "\b",
+    null,
+    "\\"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/special_field_name.json 0.34.0-1/tests/data/jsonschema/special_field_name.json
--- 0.26.4-3/tests/data/jsonschema/special_field_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/special_field_name.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SpecialField",
+  "type": "object",
+  "properties": {
+    "global": {
+      "type": "string"
+    },
+    "with": {
+      "type": "string"
+    },
+    "class": {
+      "type": "integer"
+    },
+    "class's": {
+      "type": "integer"
+    },
+    "class-s": {
+      "type": "string"
+    },
+    "#": {
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/special_field_name_with_inheritance_model.json 0.34.0-1/tests/data/jsonschema/special_field_name_with_inheritance_model.json
--- 0.26.4-3/tests/data/jsonschema/special_field_name_with_inheritance_model.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/special_field_name_with_inheritance_model.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SpecialField",
+  "type": "object",
+  "properties": {
+    "global": {
+      "type": "string"
+    },
+    "with": {
+      "type": "string"
+    },
+    "class": {
+      "type": "integer"
+    },
+    "class's": {
+      "type": "integer"
+    },
+    "class-s": {
+      "type": "string"
+    },
+    "#": {
+      "type": "string"
+    }
+  },
+  "allOf": [
+    {
+      "$ref": "#/definitions/base"
+    }
+  ],
+  "definitions": {
+    "base": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/nestedBase"
+        }
+      ],
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    },
+    "nestedBase": {
+      "properties": {
+        "age": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/special_prefix_model.json 0.34.0-1/tests/data/jsonschema/special_prefix_model.json
--- 0.26.4-3/tests/data/jsonschema/special_prefix_model.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/special_prefix_model.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+{
+    "$id": "schema_v2.json",
+    "$schema": "http://json-schema.org/schema#",
+
+    "type": "object",
+    "properties": {
+        "@id": {
+            "type": "string",
+            "format": "uri",
+            "pattern": "^http.*$",
+            "title": "Id must be presesnt and must be a URI"
+        },
+        "@type": { "type": "string" },
+        "@+!type": { "type": "string" },
+        "@-!type": { "type": "string" },
+        "profile": { "type": "string" }
+    },
+    "required": ["@id", "@type"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/strict_types.json 0.34.0-1/tests/data/jsonschema/strict_types.json
--- 0.26.4-3/tests/data/jsonschema/strict_types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/strict_types.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,50 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "User",
+  "type": "object",
+  "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "age": {
+          "type": "integer"
+        },
+        "salary": {
+          "type": "integer",
+          "minimum": 0
+        },
+        "debt" : {
+          "type": "integer",
+          "maximum": 0
+        },
+        "loan" : {
+          "type": "number",
+          "maximum": 0
+        },
+        "tel": {
+          "type": "string",
+          "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$"
+        },
+        "height": {
+          "type": "number",
+          "minimum": 0
+        },
+        "weight": {
+          "type": "number",
+          "minimum": 0
+        },
+        "score": {
+          "type": "number",
+          "minimum": 1e-08
+        },
+        "active": {
+          "type": "boolean"
+        },
+        "photo": {
+          "type": "string",
+          "format": "binary",
+          "minLength": 100
+        }
+      }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/string_dict.json 0.34.0-1/tests/data/jsonschema/string_dict.json
--- 0.26.4-3/tests/data/jsonschema/string_dict.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/string_dict.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "type": "object",
+  "propertyNames": {
+    "type": "string"
+  },
+  "additionalProperties": {
+    "type": "string"
+  },
+  "title": "MyStringDict"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/subclass_enum.json 0.34.0-1/tests/data/jsonschema/subclass_enum.json
--- 0.26.4-3/tests/data/jsonschema/subclass_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/subclass_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "IntEnum": {
+      "type": "integer",
+      "enum": [
+        1,
+        2,
+        3
+      ]
+    },
+    "FloatEnum": {
+      "type": "number",
+      "enum": [
+        1.1,
+        2.1,
+        3.1
+      ]
+    },
+    "StrEnum": {
+      "type": "string",
+      "enum": [
+        "1",
+        "2",
+        "3"
+      ]
+    },
+    "NonTypedEnum": {
+      "enum": [
+        "1",
+        "2",
+        "3"
+      ]
+    },
+    "BooleanEnum": {
+      "type": "boolean",
+      "enum": [
+        true,
+        false
+      ]
+    },
+    "UnknownEnum": {
+      "type": "unknown",
+      "enum": [
+        "a",
+        "b"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/titles.json 0.34.0-1/tests/data/jsonschema/titles.json
--- 0.26.4-3/tests/data/jsonschema/titles.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/titles.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,91 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "Processing Status Title",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {
+      "type": "string"
+    },
+    "ExtendedProcessingTask": {
+      "title": "Extended Processing Task Title",
+      "oneOf": [
+        {
+          "$ref": "#"
+        },
+        {
+          "type": "object",
+          "title": "NestedCommentTitle",
+          "properties": {
+            "comment": {
+              "type": "string"
+            }
+          }
+        }
+      ]
+    },
+    "ExtendedProcessingTasks": {
+      "title": "Extended Processing Tasks Title",
+      "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ExtendedProcessingTask"
+        }
+      ]
+    },
+    "ProcessingTask": {
+      "title": "Processing Task Title",
+      "type": "object",
+      "properties": {
+        "processing_status_union": {
+          "title": "Processing Status Union Title",
+          "oneOf": [
+            {
+              "title": "Processing Status Detail",
+              "type": "object",
+              "properties": {
+                "id": {
+                  "type": "integer"
+                },
+                "description": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "$ref": "#/definitions/ExtendedProcessingTask"
+            },
+            {
+              "$ref": "#/definitions/ProcessingStatus"
+            }
+          ],
+          "default": "COMPLETED"
+        },
+        "processing_status": {
+          "$ref": "#/definitions/ProcessingStatus",
+          "default": "COMPLETED"
+        },
+        "name": {
+          "type": "string"
+        },
+        "kind": {
+          "$ref": "#/definitions/kind"
+        }
+      }
+    }
+  },
+  "title": "Processing Tasks Title",
+  "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ProcessingTask"
+        }
+      ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "ProcessingStatus",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    }
+  },
+  "properties": {
+    "processing_status": {
+      "$ref": "#/definitions/ProcessingStatus",
+      "default": "COMPLETED"
+    },
+    "name": {
+      "type": "string"
+    }
+  },  
+  "title": "Basic Enum",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "properties": {
+    "input": {
+      "default": "input",
+      "title": "Input"
+    },
+    "extType": {
+      "$ref": "schema.json"
+    }
+  },
+  "title": "Input",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "properties": {
+    "output": {
+      "default": "output",
+      "title": "Output"
+    }
+  },
+  "title": "Output",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "properties": {
+    "ExtType": {
+      "type": "a",
+      "title": "ExtType"
+    }
+  },
+  "title": "ExtType",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/use_default_with_const.json 0.34.0-1/tests/data/jsonschema/use_default_with_const.json
--- 0.26.4-3/tests/data/jsonschema/use_default_with_const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/use_default_with_const.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "type": "object",
+    "title": "Use default with const",
+    "properties": {
+        "foo": {
+            "const": "foo"
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/user.json 0.34.0-1/tests/data/jsonschema/user.json
--- 0.26.4-3/tests/data/jsonschema/user.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/user.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/User"
+          },
+          "default_factory": "list"
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/user_default.json 0.34.0-1/tests/data/jsonschema/user_default.json
--- 0.26.4-3/tests/data/jsonschema/user_default.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/user_default.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/User"
+          },
+          "default": ["dog", "cat"]
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ],
+          "default": "dog"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/user_defs.json 0.34.0-1/tests/data/jsonschema/user_defs.json
--- 0.26.4-3/tests/data/jsonschema/user_defs.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/user_defs.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "https://json-schema.org/draft/2019-09/schema",
+  "$defs": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/$defs/User"
+          },
+          "default_factory": "list"
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ]
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/without_titles.json 0.34.0-1/tests/data/jsonschema/without_titles.json
--- 0.26.4-3/tests/data/jsonschema/without_titles.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/jsonschema/without_titles.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,83 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {
+      "type": "string"
+    },
+    "ExtendedProcessingTask": {
+      "oneOf": [
+        {
+          "$ref": "#"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "comment": {
+              "type": "string"
+            }
+          }
+        }
+      ]
+    },
+    "ExtendedProcessingTasks": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ExtendedProcessingTask"
+        }
+      ]
+    },
+    "ProcessingTask": {
+      "type": "object",
+      "properties": {
+        "processing_status_union": {
+          "oneOf": [
+               {
+              "type": "object",
+              "properties": {
+                "id": {
+                  "type": "integer"
+                },
+                "description": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "$ref": "#/definitions/ExtendedProcessingTask"
+            },
+            {
+              "$ref": "#/definitions/ProcessingStatus"
+            }
+          ],
+          "default": "COMPLETED"
+        },
+        "processing_status": {
+          "$ref": "#/definitions/ProcessingStatus",
+          "default": "COMPLETED"
+        },
+        "name": {
+          "type": "string"
+        },
+        "kind": {
+          "$ref": "#/definitions/kind"
+        }
+      }
+    }
+  },
+  "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ProcessingTask"
+        }
+      ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/additional_properties.yaml 0.34.0-1/tests/data/openapi/additional_properties.yaml
--- 0.26.4-3/tests/data/openapi/additional_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/additional_properties.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,203 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+        additionalProperties: true
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+      additionalProperties: false
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    test:
+      type: object
+      properties:
+        broken:
+          type: object
+          additionalProperties:
+            $ref: '#/components/schemas/broken'
+        failing:
+          type: object
+          additionalProperties:
+            type: string
+          default: { }
+    broken:
+      type: object
+      properties:
+        foo:
+          type: string
+        bar:
+          type: integer
+    brokenArray:
+      type: object
+      properties:
+        broken:
+          type: object
+          additionalProperties:
+            type: array
+            items:
+              $ref: '#/components/schemas/broken'
+    FileSetUpload:
+      title: FileSetUpload
+      required:
+        - tags
+      type: object
+      properties:
+        task_id:
+            title: 'task id'
+            type: string
+        tags:
+            title: 'Dict of tags, each containing a list of file names'
+            type: object
+            additionalProperties:
+              type: array
+              items:
+                type: string
diff -pruN 0.26.4-3/tests/data/openapi/alias.yaml 0.34.0-1/tests/data/openapi/alias.yaml
--- 0.26.4-3/tests/data/openapi/alias.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/alias.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,312 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      enum:
+        - ca-t
+        - dog*
+      type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    user-name:
+      properties:
+        first-name:
+          type: string
+        home-address:
+          $ref: "#/components/schemas/home-address"
+    home-address:
+      properties:
+        address-1:
+          type: string
+    team-members:
+      items:
+        type: string
+    all-of-ref:
+      allOf:
+        - $ref: "#/components/schemas/user-name"
+        - $ref: "#/components/schemas/home-address"
+    all-of-obj:
+      allOf:
+        - type: object
+          properties:
+            name:
+              type: string
+        - type: object
+          properties:
+            number:
+              type: string
+    all-of-combine:
+      allOf:
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            birth-date:
+              type: string
+              format: date
+            size:
+              type: integer
+              minimum: 1
+    any-of-combine:
+      allOf:
+        - $ref: "#/components/schemas/home-address"
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            age:
+              type: string
+    any-of-combine-in-object:
+      type: object
+      properties:
+        item:
+          allOf:
+            - $ref: "#/components/schemas/home-address"
+            - $ref: "#/components/schemas/user-name"
+            - type: object
+              properties:
+                age:
+                  type: string
+    any-of-combine-in-array:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/home-address"
+          - $ref: "#/components/schemas/user-name"
+          - type: object
+            properties:
+              age:
+                type: string
+    any-of-combine-in-root:
+      allOf:
+        - $ref: "#/components/schemas/home-address"
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            age:
+              type: string
+            birth-date:
+              type: string
+              format: date-time
+    model-s.Specie-s:
+      type: string
+      enum:
+        - dog
+        - cat
+        - snake
+    model-s.Pe-t:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        species:
+          $ref: '#/components/schemas/model-s.Specie-s'
+    model-s.Use-r:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    collection-s.Pet-s:
+      type: array
+      items:
+        $ref: "#/components/schemas/model-s.Pe-t"
+    collection-s.User-s:
+      type: array
+      items:
+        $ref: "#/components/schemas/model-s.Use-r"
+    Id:
+      type: string
+    collection-s.Rule-s:
+      type: array
+      items:
+        type: string
+    collection-s.api-s:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    model-s.Even-t:
+      type: object
+      properties:
+        name:
+          anyOf:
+            - type: string
+            - type: number
+            - type: integer
+            - type: boolean
+            - type: object
+            - type: array
+              items:
+                type: string
+    Result:
+      type: object
+      properties:
+        event:
+          $ref: '#/components/schemas/model-s.Even-t'
+    fo-o.ba-r.Thin-g:
+      properties:
+        attribute-s:
+          type: object
+    fo-o.ba-r.Than-g:
+      properties:
+        attributes:
+          type: array
+          items:
+            type: object
+    fo-o.ba-r.Clon-e:
+      allOf:
+        - $ref: '#/components/schemas/fo-o.ba-r.Thin-g'
+    fo-o.Te-a:
+      properties:
+        flavour-name:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+    Source:
+      properties:
+        country-name:
+          type: string
+    fo-o.Coco-a:
+      properties:
+        quality:
+          type: integer
+    wo-o.bo-o.Chocolat-e:
+      properties:
+        flavour-name:
+          type: string
+        sourc-e:
+          $ref: '#/components/schemas/Source'
+        coco-a:
+          $ref: '#/components/schemas/fo-o.Coco-a'
diff -pruN 0.26.4-3/tests/data/openapi/aliases.json 0.34.0-1/tests/data/openapi/aliases.json
--- 0.26.4-3/tests/data/openapi/aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/aliases.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,4 @@
+{
+  "name": "name_",
+  "id": "id_"
+}
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/openapi.yaml 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/openapi.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/openapi.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: 3.1.0
+paths: {}
+components:
+  schemas:
+    Animals:
+      $ref: ./schema/animal.yaml
+    Pets:
+      $ref: ./schema/pet.yaml
+servers:
+  - url: /api
+info:
+  title: Example
+  version: "1.0"
+  description: Example API
+tags:
+  - name: Animals
+    description: Information about Animals.
+  - name: Pets
+    description: Information about Pets.
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+title: Animal
+type: object
+additionalProperties: false
+properties:
+  kind:
+    type: string
+    description: The kind of the animal
+    enum:
+      - CAT
+      - DOG
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+title: Pet
+type: object
+additionalProperties: false
+properties:
+  kind:
+    description: The kind of the pet
+    type: string
+    allOf:
+      - $ref: ./animal.yaml#/properties/kind
diff -pruN 0.26.4-3/tests/data/openapi/allof.yaml 0.34.0-1/tests/data/openapi/allof.yaml
--- 0.26.4-3/tests/data/openapi/allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/allof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,225 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - number
+      properties:
+        number:
+          type: string
+    AllOfref:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+    AllOfNested1:
+      allOf:
+       - $ref: "#/components/schemas/AllOfNested2"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AllOfCombine"
+    AllOfNested2:
+      allOf:
+       - $ref: "#/components/schemas/AllOfNested3"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AllOfNested1"
+    AllOfNested3:
+      allOf:
+       - $ref: "#/components/schemas/AllOfCombine"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AnyOfCombine"
+    AllOfobj:
+      allOf:
+        - type: object
+          properties:
+            name:
+              type: string
+        - type: object
+          properties:
+            number:
+              type: string
+    AllOfCombine:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - type: object
+          properties:
+            birthdate:
+              type: string
+              format: date
+            size:
+              type: integer
+              minimum: 1
+    AnyOfCombine:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - $ref: "#/components/schemas/Car"
+          - type: object
+            properties:
+              age:
+                type: string
+    AnyOfCombineInObject:
+      type: object
+      properties:
+        item:
+          allOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                age:
+                  type: string
+    AnyOfCombineInArray:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - $ref: "#/components/schemas/Car"
+          - type: object
+            properties:
+              age:
+                type: string
+    AnyOfCombineInRoot:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+            age:
+              type: string
+            birthdate:
+              type: string
+              format: date-time
+    AnyOfCombineUnknownObjectInRoot:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - description: 'TODO'
+    AnyOfCombineUnknownObjectInArray:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - description: 'TODO'
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/allof_required.yaml 0.34.0-1/tests/data/openapi/allof_required.yaml
--- 0.26.4-3/tests/data/openapi/allof_required.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/allof_required.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.0
+info:
+  title: "no title"
+  version: "no version"
+paths: {}
+components:
+  schemas:
+    Foo:
+      type: object
+      required:
+        - a
+        - b
+      properties:
+        a:
+          type: string
+        b:
+          type: string
+
+    Bar:
+      type: object
+      properties:
+        type:
+          type: string
+          pattern: service
+        name:
+          type: string
+      allOf:
+        - $ref: '#/components/schemas/Foo'
+        - required:
+            - type
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/allof_required_fields.yaml 0.34.0-1/tests/data/openapi/allof_required_fields.yaml
--- 0.26.4-3/tests/data/openapi/allof_required_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/allof_required_fields.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    EmailMessage:
+      title: Email message
+      description: |
+        An email message. There must be at least one recipient in `to`, `cc`, or `bcc`.
+      type: object
+      required:
+        - allOf:
+            - message
+            - subject
+            - to
+      properties:
+        message:
+          type: string
+          description: The email message text.
+        subject:
+          type: string
+          description: The subject line of the email.
+        to:
+          type: array
+          description: A list of email addresses.
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_same_prefix_with_ref.yaml 0.34.0-1/tests/data/openapi/allof_same_prefix_with_ref.yaml
--- 0.26.4-3/tests/data/openapi/allof_same_prefix_with_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/allof_same_prefix_with_ref.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.3
+info:
+  title: Foo
+  version: "1.0"
+paths:
+  /:
+    get:
+      responses:
+        '200':
+          description: ''
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        foo_bar:
+          allOf:
+          - $ref: '#/components/schemas/FooBarBaz'
+
+    FooBar:
+      type: object
+      properties:
+        id:
+          type: integer
+
+    FooBarBaz:
+      type: object
+      properties:
+        id:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/any.yaml 0.34.0-1/tests/data/openapi/any.yaml
--- 0.26.4-3/tests/data/openapi/any.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/any.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,34 @@
+info:
+  title: FastAPI
+  version: 0.1.0
+openapi: 3.0.2
+paths:
+  /:
+    post:
+      operationId: read_root__post
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/Item'
+        required: true
+      responses:
+        '200':
+          content:
+            application/json:
+              schema: {}
+          description: Successful Response
+      summary: Read Root
+components:
+  schemas:
+    Item:
+      properties:
+        bar:
+          title: Bar
+        foo:
+          title: Foo
+          type: string
+      required:
+      - foo
+      title: Item
+      type: object
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/anyof.yaml 0.34.0-1/tests/data/openapi/anyof.yaml
--- 0.26.4-3/tests/data/openapi/anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/anyof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,183 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    AnyOfItem:
+      anyOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+              name:
+                type: string
+        - type: string
+          maxLength: 5000
+    AnyOfobj:
+      type: object
+      properties:
+        item:
+          anyOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+            - type: string
+              maxLength: 5000
+    AnyOfArray:
+      type: array
+      items:
+          anyOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+                birthday:
+                  type: string
+                  format: date
+            - type: string
+              maxLength: 5000
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Config:
+      properties:
+        setting:
+          type: object
+          additionalProperties:
+            anyOf:
+            - type: string
+            - type: array
+              items:
+                type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/anyof_required.yaml 0.34.0-1/tests/data/openapi/anyof_required.yaml
--- 0.26.4-3/tests/data/openapi/anyof_required.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/anyof_required.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    EmailMessage:
+      title: Email message
+      description: |
+        An email message. There must be at least one recipient in `to`, `cc`, or `bcc`.
+      type: object
+      required:
+        - message
+        - subject
+        - anyOf:
+            - to
+            - cc
+            - bcc
+      properties:
+        bcc:
+          type: array
+          items:
+            type: string
+          description: A list of "blind carbon copy" email addresses.
+        cc:
+          type: array
+          items:
+            type: string
+          description: A list of "carbon copy" email addresses.
+        message:
+          type: string
+          description: The email message text.
+        subject:
+          type: string
+          description: The subject line of the email.
+        to:
+          type: array
+          description: A list of email addresses.
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/api.yaml 0.34.0-1/tests/data/openapi/api.yaml
--- 0.26.4-3/tests/data/openapi/api.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/api.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,179 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: error result
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/api_constrained.yaml 0.34.0-1/tests/data/openapi/api_constrained.yaml
--- 0.26.4-3/tests/data/openapi/api_constrained.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/api_constrained.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,229 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+            minimum: 0
+            maximum: 100
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          minimum: 0
+          maximum: 9223372036854775807
+        name:
+          type: string
+          maxLength: 256
+        tag:
+          type: string
+          maxLength: 64
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+      maxItems: 10
+      minItems: 1
+      uniqueItems: true
+    UID:
+      type: integer
+      minimum: 0
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+          - uid
+        properties:
+          id:
+            type: integer
+            format: int64
+            minimum: 0
+          name:
+            type: string
+            maxLength: 256
+          tag:
+            type: string
+            maxLength: 64
+          uid:
+            $ref: '#/components/schemas/UID'
+          phones:
+            type: array
+            items:
+              type: string
+              minLength: 3
+            maxItems: 10
+          fax:
+            type: array
+            items:
+              type: string
+              minLength: 3
+          height:
+            type:
+              - integer
+              - number
+            minimum: 1
+            maximum: 300
+          weight:
+            type:
+              - number
+              - integer
+            minimum: 1.0
+            maximum: 1000.0
+          age:
+            type: integer
+            minimum: 0.0
+            maximum: 200.0
+            exclusiveMinimum: true
+          rating:
+            type: number
+            minimum: 0
+            exclusiveMinimum: True
+            maximum: 5
+
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            minLength: 1
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/api_multiline_docstrings.yaml 0.34.0-1/tests/data/openapi/api_multiline_docstrings.yaml
--- 0.26.4-3/tests/data/openapi/api_multiline_docstrings.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/api_multiline_docstrings.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,179 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: "error result.\nNow with multi-line docstrings."
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: "To be used as a dataset parameter value.\nNow also with multi-line docstrings."
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/api_ordered_required_fields.yaml 0.34.0-1/tests/data/openapi/api_ordered_required_fields.yaml
--- 0.26.4-3/tests/data/openapi/api_ordered_required_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/api_ordered_required_fields.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,182 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - beforeTag
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        beforeTag:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: error result
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/array_called_fields_with_oneOf_items.yaml 0.34.0-1/tests/data/openapi/array_called_fields_with_oneOf_items.yaml
--- 0.26.4-3/tests/data/openapi/array_called_fields_with_oneOf_items.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/array_called_fields_with_oneOf_items.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+# input.yaml
+components:
+  schemas:
+    BadSchema:
+      type: object
+      properties:
+        fields:
+          type: array
+          items:
+            oneOf:
+            - type: object
+              properties:
+                a:
+                  type: string
+            - type: object
+              properties:
+                b:
+                  type: string
+                  pattern: "^[a-zA-Z_]+$"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/array_enum.yaml 0.34.0-1/tests/data/openapi/array_enum.yaml
--- 0.26.4-3/tests/data/openapi/array_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/array_enum.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Type1:
+      type: array
+      items:
+        type: string
+        enum:
+          - enumOne
+          - enumTwo
+
+    Type2:
+      type: string
+      enum:
+        - enumFour
+        - enumFive
diff -pruN 0.26.4-3/tests/data/openapi/body_and_parameters.yaml 0.34.0-1/tests/data/openapi/body_and_parameters.yaml
--- 0.26.4-3/tests/data/openapi/body_and_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/body_and_parameters.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,364 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets:
+    $ref: '#/components/pathItems/Pets'
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    put:
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      summary: update a pet
+      tags:
+        - pets
+      requestBody:
+        required: false
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/PetForm'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /food:
+    post:
+      summary: Create a food
+      tags:
+        - pets
+      requestBody:
+        required: true
+        content:
+          application/problem+json:
+            schema:
+              type: string
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/problem+json:
+              schema:
+                type: string
+  /food/{food_id}:
+    get:
+      summary: Info for a specific pet
+      operationId: showFoodById
+      tags:
+        - foods
+      parameters:
+        - name: food_id
+          in: path
+          description: The id of the food to retrieve
+          schema:
+            type: string
+        - name: message_texts
+          in: query
+          required: false
+          explode: true
+          schema:
+            type: array
+            items:
+              type: string
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: integer
+              examples:
+                example-1:
+                  value:
+                    - 0
+                    - 1
+                    - 3
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /foo:
+    get:
+      tags:
+        - foo
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: string
+    parameters:
+    - $ref: '#/components/parameters/MyParam'
+  /bar:
+    post:
+      summary: Create a bar
+      tags:
+        - bar
+      requestBody:
+        content:
+          application/x-www-form-urlencoded:
+            schema:
+              $ref: '#/components/schemas/PetForm'
+  /user:
+    get:
+      tags:
+        - user
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  timestamp:
+                    type: string
+                    format: date-time
+                  name:
+                    type: string
+                  age:
+                    type: string
+                required:
+                  - name
+                  - timestamp
+    post:
+      tags:
+        - user
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                timestamp:
+                  type: string
+                  format: date-time
+                name:
+                  type: string
+                age:
+                  type: string
+              required:
+                - name
+                - timestamp
+      responses:
+        '201':
+          description: OK
+  /users:
+    get:
+      tags:
+        - user
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    timestamp:
+                      type: string
+                      format: date-time
+                    name:
+                      type: string
+                    age:
+                      type: string
+                  required:
+                    - name
+                    - timestamp
+    post:
+      tags:
+        - user
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: array
+              items:
+                type: object
+                properties:
+                  timestamp:
+                    type: string
+                    format: date-time
+                  name:
+                    type: string
+                  age:
+                    type: string
+                required:
+                  - name
+                  - timestamp
+      responses:
+        '201':
+          description: OK
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
+  pathItems:
+    Pets:
+      get:
+        summary: List all pets
+        operationId: listPets
+        tags:
+          - pets
+        security: []
+        parameters:
+          - name: limit
+            in: query
+            description: How many items to return at one time (max 100)
+            required: false
+            schema:
+              default: 0
+              type: integer
+              format: int32
+          - name: HomeAddress
+            in: query
+            required: false
+            schema:
+              default: 'Unknown'
+              type: string
+          - name: kind
+            in: query
+            required: false
+            schema:
+              default: dog
+              type: string
+        responses:
+          '200':
+            description: A paged array of pets
+            headers:
+              x-next:
+                description: A link to the next page of responses
+                schema:
+                  type: string
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                  - $ref: "#/components/schemas/Pet"
+          default:
+            description: unexpected error
+            content:
+              application/json:
+                schema:
+                  $ref: "#/components/schemas/Error"
+      post:
+        summary: Create a pet
+        tags:
+          - pets
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/PetForm'
+        responses:
+          '201':
+            description: Null response
+          default:
+            description: unexpected error
+            content:
+              application/json:
+                schema:
+                  $ref: "#/components/schemas/Error"
diff -pruN 0.26.4-3/tests/data/openapi/body_and_parameters_remote_ref.yaml 0.34.0-1/tests/data/openapi/body_and_parameters_remote_ref.yaml
--- 0.26.4-3/tests/data/openapi/body_and_parameters_remote_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/body_and_parameters_remote_ref.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,263 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      security: []
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+            format: int32
+        - name: HomeAddress
+          in: query
+          required: false
+          schema:
+            default: 'Unknown'
+            type: string
+        - name: kind
+          in: query
+          required: false
+          schema:
+            default: dog
+            type: string
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                 - $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    post:
+      summary: Create a pet
+      tags:
+        - pets
+      requestBody:
+        $ref: 'https://schema.example#/components/requestBodies/Pet'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    put:
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      summary: update a pet
+      tags:
+        - pets
+      requestBody:
+        required: false
+        content:
+          application/json:
+            schema:
+              $ref: 'https://schema.example#/components/schemas/PetForm'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /food:
+    post:
+      summary: Create a food
+      tags:
+        - pets
+      requestBody:
+        required: true
+        content:
+          application/problem+json:
+            schema:
+              type: string
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/problem+json:
+              schema:
+                type: string
+  /food/{food_id}:
+    get:
+      summary: Info for a specific pet
+      operationId: showFoodById
+      tags:
+        - foods
+      parameters:
+        - name: food_id
+          in: path
+          description: The id of the food to retrieve
+          schema:
+            type: string
+        - name: message_texts
+          in: query
+          required: false
+          explode: true
+          schema:
+            type: array
+            items:
+              type: string
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: integer
+              examples:
+                example-1:
+                  value:
+                    - 0
+                    - 1
+                    - 3
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /foo:
+    get:
+      tags:
+        - foo
+      responses:
+        200:
+          $ref: 'https://schema.example#/components/responses/OK'
+    parameters:
+      - $ref: 'https://schema.example#/components/parameters/MyParam'
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  responses:
+    OK:
+      description: OK
+      content:
+        application/json:
+          schema:
+            type: string
+  requestBodies:
+    Pet:
+      required: true
+      content:
+        application/json:
+          schema:
+            $ref: '#/components/schemas/PetForm'
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/complex_reference.json 0.34.0-1/tests/data/openapi/complex_reference.json
--- 0.26.4-3/tests/data/openapi/complex_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/complex_reference.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,58 @@
+{
+    "openapi": "3.0.0",
+    "components": {
+        "schemas": {
+            "A": {
+                "properties": {
+                    "a_property": {
+                        "$ref": "#/components/schemas/B1"
+                    },
+                },
+                "type": "object"
+            },
+            "B1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/A"
+                    }
+                ],
+                "type": "object"
+            },
+            "C1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/B1"
+                    }
+                ],
+                "type": "object"
+            },
+            "B2": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/A"
+                    }
+                ],
+                "type": "object"
+            },
+            "D1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/C1"
+                    }
+                ],
+                "type": "object"
+            },
+            "D1andB2": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/D1"
+                    },
+                    {
+                        "$ref": "#/components/schemas/B2"
+                    }
+                ],
+                "type": "object"
+            }
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/const.json 0.34.0-1/tests/data/openapi/const.json
--- 0.26.4-3/tests/data/openapi/const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/const.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "components": {
+    "schemas": {
+      "Namespace": {
+        "type": "object",
+        "required": [
+          "apiVersion",
+          "kind"
+        ],
+        "properties": {
+          "apiVersion": {
+            "const": "v1"
+          },
+          "kind": {
+            "const": "Namespace"
+          }
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/const.yaml 0.34.0-1/tests/data/openapi/const.yaml
--- 0.26.4-3/tests/data/openapi/const.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/const.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: '3.0.2'
+components:
+  schemas:
+    ApiVersion:
+      description: The version of this API
+      type: string
+      const: v1
+    Api:
+      type: object
+      required:
+        - version
+      properties:
+        version:
+          $ref: "#/components/schemas/ApiVersion"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/content_in_parameters.yaml 0.34.0-1/tests/data/openapi/content_in_parameters.yaml
--- 0.26.4-3/tests/data/openapi/content_in_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/content_in_parameters.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,73 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /{supiOrSuci}/security-information-rg:
+    get:
+      summary: Get authentication data for the FN-RG
+      operationId: GetRgAuthData
+      tags:
+        - Get Auth Data for FN-RG
+      responses:
+        '200':
+          description: Null response
+      parameters:
+        - name: plmn-id
+          in: query
+          description: serving PLMN ID
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Pet'
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/custom_id.yaml 0.34.0-1/tests/data/openapi/custom_id.yaml
--- 0.26.4-3/tests/data/openapi/custom_id.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/custom_id.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+openapi: 3.0.0
+components:
+  schemas:
+    CustomId:
+      description: My custom ID
+      type: string
+      format: uuid
+    Model:
+      type: object
+      properties:
+        custom_id:
+          $ref: "#/components/schemas/CustomId"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/datetime.yaml 0.34.0-1/tests/data/openapi/datetime.yaml
--- 0.26.4-3/tests/data/openapi/datetime.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/datetime.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    InventoryItem:
+      required:
+#      - id
+#      - name
+      - releaseDate
+      type: object
+      properties:
+#        id:
+#          type: string
+#          format: uuid
+#          example: d290f1ee-6c54-4b01-90e6-d701748f0851
+#        name:
+#          type: string
+#          example: Widget Adapter
+        releaseDate:
+          type: string
+          format: date-time
+          example: 2016-08-29T09:12:33.001Z
diff -pruN 0.26.4-3/tests/data/openapi/default_object.yaml 0.34.0-1/tests/data/openapi/default_object.yaml
--- 0.26.4-3/tests/data/openapi/default_object.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/default_object.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,81 @@
+openapi: 3.0.3
+info:
+  title: Example
+  version: 0.1.0
+servers:
+- url: http://example.com
+paths:
+  /foo:
+    delete:
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        text:
+          type: string
+          default: "987"
+        number:
+          type: number
+    Bar:
+      type: object
+      properties:
+        foo:
+          allOf:
+          - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        baz:
+          type: array
+          items:
+            $ref: '#/components/schemas/Foo'
+          default:
+            - text: abc
+              number: 123
+            - text: efg
+              number: 456
+    Nested.Foo:
+      type: string
+    Nested.Bar:
+      type: object
+      properties:
+        foo:
+          allOf:
+            - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        baz:
+          type: array
+          items:
+            $ref: '#/components/schemas/Foo'
+          default:
+            - text: abc
+              number: 123
+            - text: efg
+              number: 456
+        nested_foo:
+          $ref: '#/components/schemas/Nested.Foo'
+          default: 'default foo'
+    Another.Foo:
+      type: string
+    Another.Bar:
+      type: object
+      properties:
+        original_foo:
+          allOf:
+            - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        nested_foo:
+          type: array
+          items:
+            $ref: '#/components/schemas/Nested.Foo'
+          default:
+            - abc
+            - efg
diff -pruN 0.26.4-3/tests/data/openapi/definitions.yaml 0.34.0-1/tests/data/openapi/definitions.yaml
--- 0.26.4-3/tests/data/openapi/definitions.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/definitions.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,43 @@
+openapi: "3.0.0"
+schemas:
+  Problem:
+    properties:
+      detail:
+        description: |
+          A human readable explanation specific to this occurrence of the
+          problem. You MUST NOT expose internal information, personal
+          data or implementation details through this field.
+        example: Request took too long to complete.
+        type: string
+      instance:
+        description: |
+          An absolute URI that identifies the specific occurrence of the problem.
+          It may or may not yield further information if dereferenced.
+        format: uri
+        type: string
+      status:
+        description: |
+          The HTTP status code generated by the origin server for this occurrence
+          of the problem.
+        example: 503
+        exclusiveMaximum: true
+        format: int32
+        maximum: 600
+        minimum: 100
+        type: integer
+      title:
+        description: |
+          A short, summary of the problem type. Written in english and readable
+          for engineers (usually not suited for non technical stakeholders and
+          not localized); example: Service Unavailable
+        type: string
+      type:
+        default: about:blank
+        description: |
+          An absolute URI that identifies the problem type.  When dereferenced,
+          it SHOULD provide human-readable documentation for the problem type
+          (e.g., using HTML).
+        example: https://tools.ietf.org/html/rfc7231#section-6.6.4
+        format: uri
+        type: string
+    type: object
diff -pruN 0.26.4-3/tests/data/openapi/discriminator.yaml 0.34.0-1/tests/data/openapi/discriminator.yaml
--- 0.26.4-3/tests/data/openapi/discriminator.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+        oneOf:
+          - $ref: "#/components/schemas/ObjectBase"
+          - $ref: "#/components/schemas/CreateObjectRequest"
+          - $ref: "#/components/schemas/UpdateObjectRequest"
+        discriminator:
+          propertyName: type
+          mapping:
+            type1: "#/components/schemas/ObjectBase"
+            type2: "#/components/schemas/CreateObjectRequest"
+            type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum.yaml 0.34.0-1/tests/data/openapi/discriminator_enum.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_enum.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,40 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Request:
+      oneOf:
+        - $ref: '#/components/schemas/RequestV1'
+        - $ref: '#/components/schemas/RequestV2'
+      discriminator:
+        propertyName: version
+        mapping:
+          v1: '#/components/schemas/RequestV1'
+          v2: '#/components/schemas/RequestV2'
+
+    RequestVersionEnum:
+      type: string
+      description: this is not included!
+      title: no title!
+      enum:
+        - v1
+        - v2
+    RequestBase:
+      properties:
+        version:
+          $ref: '#/components/schemas/RequestVersionEnum'
+      required:
+        - version
+
+    RequestV1:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
+      properties:
+        request_id:
+          type: string
+          title: test title
+          description: there is description
+      required:
+        - request_id
+    RequestV2:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum_duplicate.yaml 0.34.0-1/tests/data/openapi/discriminator_enum_duplicate.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum_duplicate.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_enum_duplicate.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,64 @@
+# Example from https://docs.pydantic.dev/latest/concepts/unions/#discriminated-unions
+openapi: 3.1.0
+components:
+  schemas:
+    Cat:
+      properties:
+        pet_type:
+          const: "cat"
+          title: "Pet Type"
+        meows:
+          title: Meows
+          type: integer
+      required:
+        - pet_type
+        - meows
+      title: Cat
+      type: object
+    Dog:
+      properties:
+        pet_type:
+          const: "dog"
+          title: "Pet Type"
+        barks:
+          title: Barks
+          type: number
+      required:
+        - pet_type
+        - barks
+      title: Dog
+      type: object
+    Lizard:
+      properties:
+        pet_type:
+          enum:
+            - reptile
+            - lizard
+          title: Pet Type
+          type: string
+        scales:
+          title: Scales
+          type: boolean
+      required:
+        - pet_type
+        - scales
+      title: Lizard
+      type: object
+    Animal:
+      properties:
+        pet:
+          discriminator:
+            mapping:
+              cat: '#/components/schemas/Cat'
+              dog: '#/components/schemas/Dog'
+              lizard: '#/components/schemas/Lizard'
+              reptile: '#/components/schemas/Lizard'
+            propertyName: pet_type
+          oneOf:
+            - $ref: '#/components/schemas/Cat'
+            - $ref: '#/components/schemas/Dog'
+            - $ref: '#/components/schemas/Lizard'
+          title: Pet
+        'n':
+          title: 'N'
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_in_array_anyof.yaml 0.34.0-1/tests/data/openapi/discriminator_in_array_anyof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_in_array_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_in_array_anyof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,48 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+      type: object
+      required:
+        - myArray
+      properties:
+        myArray:
+          type: array
+          items:
+            oneOf:
+              - $ref: "#/components/schemas/ObjectBase"
+              - $ref: "#/components/schemas/CreateObjectRequest"
+              - $ref: "#/components/schemas/UpdateObjectRequest"
+            discriminator:
+              propertyName: type
+              mapping:
+                type1: "#/components/schemas/ObjectBase"
+                type2: "#/components/schemas/CreateObjectRequest"
+                type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_in_array_oneof.yaml 0.34.0-1/tests/data/openapi/discriminator_in_array_oneof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_in_array_oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_in_array_oneof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,48 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+      type: object
+      required:
+        - myArray
+      properties:
+        myArray:
+          type: array
+          items:
+            anyOf:
+              - $ref: "#/components/schemas/ObjectBase"
+              - $ref: "#/components/schemas/CreateObjectRequest"
+              - $ref: "#/components/schemas/UpdateObjectRequest"
+            discriminator:
+              propertyName: type
+              mapping:
+                type1: "#/components/schemas/ObjectBase"
+                type2: "#/components/schemas/CreateObjectRequest"
+                type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_with_properties.yaml 0.34.0-1/tests/data/openapi/discriminator_with_properties.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_with_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_with_properties.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,46 @@
+openapi: 3.0.1
+components:
+  schemas:
+    CustomContextVariable:
+      oneOf:
+        - $ref: '#/components/schemas/UserContextVariable'
+        - $ref: '#/components/schemas/IssueContextVariable'
+      properties:
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      discriminator:
+        mapping:
+          user: '#/components/schemas/UserContextVariable'
+          issue: '#/components/schemas/IssueContextVariable'
+        propertyName: "@type"
+      required:
+        - "@type"
+      type: object
+    UserContextVariable:
+      properties:
+        accountId:
+          description: The account ID of the user.
+          type: string
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      required:
+        - accountId
+        - "@type"
+      type: object
+    IssueContextVariable:
+      properties:
+        id:
+          description: The issue ID.
+          format: int64
+          type: integer
+        key:
+          description: The issue key.
+          type: string
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      required:
+        - "@type"
+      type: object
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_without_mapping.yaml 0.34.0-1/tests/data/openapi/discriminator_without_mapping.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_without_mapping.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/discriminator_without_mapping.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+        oneOf:
+          - $ref: "#/components/schemas/ObjectBase"
+          - $ref: "#/components/schemas/CreateObjectRequest"
+          - $ref: "#/components/schemas/UpdateObjectRequest"
+        discriminator:
+          propertyName: type
+
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_model_simplify.yaml 0.34.0-1/tests/data/openapi/duplicate_model_simplify.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_model_simplify.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/duplicate_model_simplify.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.0.3"
+info:
+  version: 1.0.0
+  title: T
+paths: {}
+components:
+  schemas:
+    M:
+      properties:
+        name: {"type": "string"}
+    m:
+      properties:
+        name: {"type": "string"}
+    R:
+      allOf:
+        - {"$ref":  "#/components/schemas/m"}
+        - {"$ref":  "#/components/schemas/M"}
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_models.yaml 0.34.0-1/tests/data/openapi/duplicate_models.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/duplicate_models.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,113 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    Events:
+        type: array
+        items:
+          $ref: '#/components/schemas/Event'
+    EventRoot:
+        $ref: '#/components/schemas/Event'
+    EventObject:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    DuplicateObject1:
+        type: object
+        properties:
+          event:
+            type: array
+            items:
+              $ref: '#/components/schemas/Event'
+    DuplicateObject2:
+        type: object
+        properties:
+          event:
+            type: object
+            properties:
+              event:
+                $ref: '#/components/schemas/Event'
+    DuplicateObject3:
+        $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_models2.yaml 0.34.0-1/tests/data/openapi/duplicate_models2.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_models2.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/duplicate_models2.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,72 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: Get pet
+      operationId: getPets
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+  /cars:
+    get:
+      summary: Get car
+      operationId: getCar
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Cars"
+
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - type
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        type:
+          type: string
+          enum: [ 'pet' ]
+        details:
+          type: object
+          properties:
+            race: { type: string }
+    Car:
+      required:
+        - id
+        - name
+        - type
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        type:
+          type: string
+          enum: [ 'car' ]
+        details:
+          type: object
+          properties:
+            brand: { type: string }
diff -pruN 0.26.4-3/tests/data/openapi/empty_aliases.json 0.34.0-1/tests/data/openapi/empty_aliases.json
--- 0.26.4-3/tests/data/openapi/empty_aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/empty_aliases.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1 @@
+{}
diff -pruN 0.26.4-3/tests/data/openapi/empty_data.json 0.34.0-1/tests/data/openapi/empty_data.json
--- 0.26.4-3/tests/data/openapi/empty_data.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/empty_data.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1 @@
+{}
diff -pruN 0.26.4-3/tests/data/openapi/enum_models.yaml 0.34.0-1/tests/data/openapi/enum_models.yaml
--- 0.26.4-3/tests/data/openapi/enum_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/enum_models.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,154 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - number
+        - boolean
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        kind:
+          type: string
+          enum: ['dog', 'cat']
+        type:
+          type: string
+          enum: [ 'animal' ]
+        number:
+          type: integer
+          enum: [ 1 ]
+        boolean:
+          type: boolean
+          enum: [ true ]
+
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    animal:
+      type: object
+      properties:
+        kind:
+          type: string
+          enum: ['snake', 'rabbit']
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    EnumObject:
+      type: object
+      properties:
+        type:
+          enum: ['a', 'b']
+          type: string
+    EnumRoot:
+      enum: ['a', 'b']
+      type: string
+    IntEnum:
+      enum: [1,2]
+      type: number
+    AliasEnum:
+      enum: [1,2,3]
+      type: number
+      x-enum-varnames: ['a', 'b', 'c']
+    MultipleTypeEnum:
+      enum: [ "red", "amber", "green", null, 42 ]
+    singleEnum:
+      enum: [ "pet" ]
+      type: string
+    arrayEnum:
+      type: array
+      items: [
+        { enum: [ "cat" ] },
+        { enum: [ "dog"]}
+      ]
+    nestedNullableEnum:
+      type: object
+      properties:
+        nested_version:
+          type: string
+          nullable: true
+          default: RC1
+          description: nullable enum
+          example: RC2
+          enum:
+            - RC1
+            - RC1N
+            - RC2
+            - RC2N
+            - RC3
+            - RC4
+            - null
+    version:
+      type: string
+      nullable: true
+      default: RC1
+      description: nullable enum
+      example: RC2
+      enum:
+      - RC1
+      - RC1N
+      - RC2
+      - RC2N
+      - RC3
+      - RC4
+      - null
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/example.yaml 0.34.0-1/tests/data/openapi/example.yaml
--- 0.26.4-3/tests/data/openapi/example.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/example.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,176 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/exclusive.yaml 0.34.0-1/tests/data/openapi/exclusive.yaml
--- 0.26.4-3/tests/data/openapi/exclusive.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/exclusive.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,42 @@
+openapi: 3.0.3
+info:
+  version: 1.0.0
+  title: Bug
+servers: []
+paths: {}
+components:
+  schemas:
+    MaximumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMaximum: true
+    MinimumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: true
+    MinimumMaximumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: true
+          exclusiveMaximum: true
+    Problem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: false
+          exclusiveMaximum: false
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml 0.34.0-1/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml
--- 0.26.4-3/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,20 @@
+
+openapi: 3.0.3
+info:
+  title: Model A definitions
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    model_a.input:
+      type: object
+      properties:
+        name:
+          type: string
+    model_a.output:
+      type: object
+      properties:
+        output:
+          type: string
+        input:
+          $ref: "#/components/schemas/model_a.input"
diff -pruN 0.26.4-3/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml 0.34.0-1/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml
--- 0.26.4-3/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,12 @@
+openapi: 3.0.3
+info:
+  title: Model B definitions
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    modules.quality_evaluation.QualityEvaluationRequest:
+      type: object
+      properties:
+        input:
+          $ref: "../model_a/types.openapi.yaml#/components/schemas/model_a.output"
diff -pruN 0.26.4-3/tests/data/openapi/extra_data.json 0.34.0-1/tests/data/openapi/extra_data.json
--- 0.26.4-3/tests/data/openapi/extra_data.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/extra_data.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "Pet": {
+    "comment": "1 2, 1 2, this is just a pet",
+    "config":{
+      "arbitrary_types_allowed": "True",
+      "coerce_numbers_to_str": "True"}
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/extra_data_msgspec.json 0.34.0-1/tests/data/openapi/extra_data_msgspec.json
--- 0.26.4-3/tests/data/openapi/extra_data_msgspec.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/extra_data_msgspec.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "#all#": {
+        "base_class_kwargs": {
+            "omit_defaults": true
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/openapi/flat_type.jsonschema 0.34.0-1/tests/data/openapi/flat_type.jsonschema
--- 0.26.4-3/tests/data/openapi/flat_type.jsonschema	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/flat_type.jsonschema	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "title": "Foo",
+  "$schema": "http://json-schema.org/schema#",
+  "description": "",
+  "type": "object",
+  "properties": {
+    "foo": {
+      "$ref": "#/definitions/foo"
+    }
+  },
+  "definitions": {
+    "foo": {
+      "type": "string"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/inheritance.yaml 0.34.0-1/tests/data/openapi/inheritance.yaml
--- 0.26.4-3/tests/data/openapi/inheritance.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/inheritance.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,28 @@
+openapi: 3.1.0
+components:
+  schemas:
+    Base:
+      required:
+        - id
+      properties:
+        id:
+          type: string
+          format: uuid
+        createdAt:
+          type: string
+          format: date-time
+        version:
+          type: number
+          default: 1
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Base"
+        - properties:
+            url:
+              type: string
+              format: uri
+              default: "https://example.com"
+            title:
+              type: string
+          required:
+            - title
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/invalid.yaml 0.34.0-1/tests/data/openapi/invalid.yaml
--- 0.26.4-3/tests/data/openapi/invalid.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/invalid.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,2 @@
+invalid:
+  openapi
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/json_pointer.yaml 0.34.0-1/tests/data/openapi/json_pointer.yaml
--- 0.26.4-3/tests/data/openapi/json_pointer.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/json_pointer.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,5 @@
+components:
+  schemas:
+    Foo:
+      allOf:
+        - $ref: "root_model.yaml#/Test"
diff -pruN 0.26.4-3/tests/data/openapi/lazy_resolved_models.yaml 0.34.0-1/tests/data/openapi/lazy_resolved_models.yaml
--- 0.26.4-3/tests/data/openapi/lazy_resolved_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/lazy_resolved_models.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,95 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Results:
+        type: object
+        properties:
+          envets:
+            items:
+              $ref: '#/components/schemas/Events'
+          event:
+            items:
+              $ref: '#/components/schemas/Event'
+    Events:
+        type: array
+        items:
+          $ref: '#/components/schemas/Event'
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+        event:
+          $ref: '#/components/schemas/Event'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/list.json 0.34.0-1/tests/data/openapi/list.json
--- 0.26.4-3/tests/data/openapi/list.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/list.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1 @@
+["Ceci", "n'est", "pas", "une", "object"]
diff -pruN 0.26.4-3/tests/data/openapi/max_items_enum.yaml 0.34.0-1/tests/data/openapi/max_items_enum.yaml
--- 0.26.4-3/tests/data/openapi/max_items_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/max_items_enum.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.1.0"
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        bar:
+          type: array
+          items:
+            enum:
+              - hello
+              - goodbye
+            maxLength: 5
+            minLength: 1
+            type: string
+            pattern: "^.*$"
+          maxItems: 3
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/max_min_number.yaml 0.34.0-1/tests/data/openapi/max_min_number.yaml
--- 0.26.4-3/tests/data/openapi/max_min_number.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/max_min_number.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.3
+info:
+  title: Product API
+  version: 1.0
+paths:
+  /product:
+    post:
+      operationId: createProduct
+      description: Create new product
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/product'
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/product'
+components:
+  schemas:
+    product:
+      type: object
+      properties:
+        price:
+          type: number
+          minimum: -999999.999999
+          maximum: 999999.999999
diff -pruN 0.26.4-3/tests/data/openapi/modular.yaml 0.34.0-1/tests/data/openapi/modular.yaml
--- 0.26.4-3/tests/data/openapi/modular.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/modular.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,287 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Modular Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/collections.Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/collections.Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    models.Species:
+      type: string
+      enum:
+        - dog
+        - cat
+        - snake
+    models.Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        species:
+          $ref: '#/components/schemas/models.Species'
+    models.User:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    collections.Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/models.Pet"
+    collections.Users:
+      type: array
+      items:
+        $ref: "#/components/schemas/models.User"
+    optional:
+      type: string
+    Id:
+      type: string
+    collections.Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    collections.apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+          stage:
+            type: string
+            enum: [
+              "test",
+              "dev",
+              "stg",
+              "prod"
+            ]
+    models.Event:
+      type: object
+      properties:
+        name:
+          anyOf:
+            - type: string
+            - type: number
+            - type: integer
+            - type: boolean
+            - type: object
+            - type: array
+              items:
+                type: string
+    Result:
+      type: object
+      properties:
+        event:
+          $ref: '#/components/schemas/models.Event'
+    foo.bar.Thing:
+      properties:
+        attributes:
+          type: object
+    foo.bar.Thang:
+      properties:
+        attributes:
+          type: array
+          items:
+            type: object
+    foo.bar.Clone:
+      allOf:
+        - $ref: '#/components/schemas/foo.bar.Thing'
+        - type: object
+          properties:
+            others:
+              type: object
+              properties:
+                 name:
+                   type: string
+
+    foo.Tea:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+    Source:
+      properties:
+        country:
+          type: string
+    foo.Cocoa:
+      properties:
+        quality:
+          type: integer
+    bar.Field:
+      type: string
+      example: green
+    woo.boo.Chocolate:
+      properties:
+        flavour:
+          type: string
+        source:
+          $ref: '#/components/schemas/Source'
+        cocoa:
+          $ref: '#/components/schemas/foo.Cocoa'
+        field:
+          $ref: '#/components/schemas/bar.Field'
+    differentTea:
+      type: object
+      properties:
+        foo:
+          $ref: '#/components/schemas/foo.Tea'
+        nested:
+          $ref: '#/components/schemas/nested.foo.Tea'
+    nested.foo.Tea:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+        self:
+          $ref: '#/components/schemas/nested.foo.Tea'
+        optional:
+          type: array
+          items:
+            $ref: '#/components/schemas/optional'
+    nested.foo.TeaClone:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+        self:
+          $ref: '#/components/schemas/nested.foo.Tea'
+        optional:
+          type: array
+          items:
+            $ref: '#/components/schemas/optional'
+    nested.foo.List:
+      type: array
+      items:
+        $ref: '#/components/schemas/nested.foo.Tea'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/multiple_required_any_of.yaml 0.34.0-1/tests/data/openapi/multiple_required_any_of.yaml
--- 0.26.4-3/tests/data/openapi/multiple_required_any_of.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/multiple_required_any_of.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: 3.0.0
+info:
+paths:
+components:
+  schemas:
+    Addr:
+      type: object
+      properties:
+        ipv4Addr:
+          $ref: '#/components/schemas/Ipv4Addr'
+        ipv6Addr:
+          $ref: '#/components/schemas/Ipv6Addr'
+      anyOf:
+        - required: [ ipv4Addr ]
+        - required: [ ipv6Addr ]
+    Ipv4Addr:
+      type: string
+      format: ipv4
+    Ipv6Addr:
+      type: string
+      format: ipv6
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_anyof.yaml 0.34.0-1/tests/data/openapi/nested_anyof.yaml
--- 0.26.4-3/tests/data/openapi/nested_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/nested_anyof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Container:
+      allOf:
+        - type: object
+          required:
+            - contents
+          properties:
+            contents:
+              type: array
+              items:
+                anyOf:
+                  - $ref: '#/components/schemas/Type1'
+                  - $ref: '#/components/schemas/Type2'
+    Type1:
+      type: object
+      properties:
+        prop:
+          type: string
+    Type2:
+      type: object
+      properties:
+        prop:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_enum.json 0.34.0-1/tests/data/openapi/nested_enum.json
--- 0.26.4-3/tests/data/openapi/nested_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/nested_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,63 @@
+{
+    "openapi": "3.0.0",
+    "info": {
+        "title": "Test API",
+        "version": "1.0"
+    },
+    "paths": {},
+    "components": {
+        "schemas": {
+          "Result1": {
+                "type": "object",
+                "description": "description for Result1",
+                "properties": {
+                    "state": {
+                        "$ref": "#/components/schemas/NestedState1"
+                    }
+                },
+                "required": [
+                    "state"
+                ]
+            },
+            "Result2": {
+                "type": "object",
+                "description": "description for Result2",
+                "properties": {
+                    "state": {
+                        "$ref": "#/components/schemas/NestedState2"
+                    }
+                },
+                "required": [
+                    "state"
+                ]
+            },
+            "NestedState1": {
+                "allOf": [
+                    {
+                        "description": "description for NestedState1"
+                    },
+                    {
+                        "$ref": "#/components/schemas/State"
+                    }
+                ]
+            },
+            "NestedState2": {
+                "allOf": [
+                    {
+                        "description": "description for NestedState2"
+                    },
+                    {
+                        "$ref": "#/components/schemas/State"
+                    }
+                ]
+            },
+            "State": {
+                "type": "string",
+                "enum": [
+                    "1",
+                    "2"
+                ]
+            }
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_oneof.yaml 0.34.0-1/tests/data/openapi/nested_oneof.yaml
--- 0.26.4-3/tests/data/openapi/nested_oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/nested_oneof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Container:
+      allOf:
+        - type: object
+          required:
+            - contents
+          properties:
+            contents:
+              type: array
+              items:
+                oneOf:
+                  - $ref: '#/components/schemas/Type1'
+                  - $ref: '#/components/schemas/Type2'
+    Type1:
+      type: object
+      properties:
+        prop:
+          type: string
+    Type2:
+      type: object
+      properties:
+        prop:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/no_components.yaml 0.34.0-1/tests/data/openapi/no_components.yaml
--- 0.26.4-3/tests/data/openapi/no_components.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/no_components.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,99 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
diff -pruN 0.26.4-3/tests/data/openapi/not.json 0.34.0-1/tests/data/openapi/not.json
--- 0.26.4-3/tests/data/openapi/not.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/not.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1 @@
+This is not JSON!
diff -pruN 0.26.4-3/tests/data/openapi/not_real_string.json 0.34.0-1/tests/data/openapi/not_real_string.json
--- 0.26.4-3/tests/data/openapi/not_real_string.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/not_real_string.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+{
+  "openapi" : "3.0.0",
+  "components" : {
+    "schemas" : {
+      "UserId" : {
+        "type" : "string"
+      },
+      "Tweet" : {
+        "type" : "object",
+        "properties" : {
+          "author_id" : {
+            "$ref" : "#/components/schemas/UserId"
+          }
+        }
+      },
+      "Users": {
+        "type": "array",
+        "items": [{
+          "$ref":  "#/components/schemas/UserId"
+        }]
+      },
+      "FileHash": {
+        "type": "string",
+        "minLength": 32,
+        "maxLength": 32,
+        "pattern": "^[a-fA-F\\d]{32}$",
+        "description": "For file"
+      },
+      "ImageHash": {
+        "$ref": "#/components/schemas/FileHash",
+        "maxLength": 64,
+        "minLength": 64
+      },
+       "FileRequest": {
+        "type": "object",
+        "required": ["file_hash"],
+        "properties": {
+          "file_hash": {
+            "$ref": "#/components/schemas/FileHash"
+          }
+        }
+      },
+      "ImageRequest": {
+        "type": "object",
+        "required": ["file_hash"],
+        "properties": {
+          "image_hash": {
+            "$ref": "#/components/schemas/ImageHash",
+            "description": "For image"
+          }
+        }
+      },
+      "FileHashes": {
+        "type": "array",
+        "items": {
+           "$ref": "#/components/schemas/FileHash"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nullable.yaml 0.34.0-1/tests/data/openapi/nullable.yaml
--- 0.26.4-3/tests/data/openapi/nullable.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/nullable.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,122 @@
+openapi: 3.0.3
+info:
+  version: 1.0.0
+  title: testapi
+  license:
+    name: proprietary
+servers: []
+paths: {}
+components:
+  schemas:
+    TopLevel:
+      type: object
+      properties:
+        cursors:
+          type: object
+          properties:
+            prev:
+              type: string
+              nullable: true
+            next:
+              type: string
+              default: last
+            index:
+              type: number
+            tag:
+              type: string
+          required:
+          - prev
+          - index
+      required:
+      - cursors
+    User:
+      type: object
+      properties:
+        info:
+          type: object
+          properties:
+            name:
+              type: string
+          required:
+            - name
+      required:
+        - info
+    apis:
+      type: array
+      nullable: true
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+            nullable: true
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+            nullable: true
+    email:
+      type: array
+      items:
+        type: object
+        properties:
+          author:
+            type: string
+          address:
+            type: string
+            description: email address
+          description:
+            type: string
+            default: empty
+          tag:
+            type: string
+        required:
+          - author
+          - address
+    id:
+      type: integer
+      default: 1
+    description:
+      type: string
+      nullable: true
+      default: example
+    name:
+      type: string
+      nullable: true
+    tag:
+      type: string
+    notes:
+      type: object
+      properties:
+        comments:
+          type: array
+          items:
+              type: string
+          default_factory: list
+          nullable: false
+    options:
+      type: object
+      properties:
+        comments:
+          type: array
+          items:
+              type: string
+              nullable: true
+        oneOfComments:
+           type: array
+           items:
+               oneOf:
+                - type: string
+                - type: number
+               nullable: true
+      required:
+        - comments
+        - oneOfComments
diff -pruN 0.26.4-3/tests/data/openapi/nullable_31.yaml 0.34.0-1/tests/data/openapi/nullable_31.yaml
--- 0.26.4-3/tests/data/openapi/nullable_31.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/nullable_31.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,23 @@
+openapi: 3.1.0
+info:
+  version: 1.0.0
+  title: testapi
+  license:
+    name: proprietary
+servers: []
+paths: {}
+components:
+  schemas:
+    Basket:
+      type: object
+      properties:
+        apples:
+          type:
+            - array
+            - 'null'
+          items:
+            $ref: '#/components/schemas/Apple'
+      required:
+        - apples
+    Apple:
+      type: object
diff -pruN 0.26.4-3/tests/data/openapi/oas_response_reference.yaml 0.34.0-1/tests/data/openapi/oas_response_reference.yaml
--- 0.26.4-3/tests/data/openapi/oas_response_reference.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/oas_response_reference.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,75 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+            format: int32
+      responses:
+        '200':
+          $ref: '#/components/responses/Pet'
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+components:
+  responses:
+    Pet:
+      description: A paged array of pets
+      headers:
+        x-next:
+          description: A link to the next page of responses
+          schema:
+            type: string
+      content:
+        application/json:
+          schema:
+            items:
+              $ref: "#/components/schemas/Pet"
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/oneof.yaml 0.34.0-1/tests/data/openapi/oneof.yaml
--- 0.26.4-3/tests/data/openapi/oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/oneof.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,183 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    OneOfItem:
+      oneOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+              name:
+                type: string
+        - type: string
+          maxLength: 5000
+    OneOfobj:
+      type: object
+      properties:
+        item:
+          oneOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+            - type: string
+              maxLength: 5000
+    OneOfArray:
+      type: array
+      items:
+          oneOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+                birthday:
+                  type: string
+                  format: date
+            - type: string
+              maxLength: 5000
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Config:
+      properties:
+        setting:
+          type: object
+          additionalProperties:
+            oneOf:
+            - type: string
+            - type: array
+              items:
+                type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/override_required_all_of.yaml 0.34.0-1/tests/data/openapi/override_required_all_of.yaml
--- 0.26.4-3/tests/data/openapi/override_required_all_of.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/override_required_all_of.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,41 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+        rank:
+          description: User rank
+          type:
+            - integer
+            - number
+        allIn:
+          oneOf:
+            - $ref: '#/components/schemas/ObjectBase/properties/name'
+            - $ref: '#/components/schemas/ObjectBase/properties/type'
+            - $ref: '#/components/schemas/ObjectBase/properties/rank'
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+        - rank
+        - allIn
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/pattern.yaml 0.34.0-1/tests/data/openapi/pattern.yaml
--- 0.26.4-3/tests/data/openapi/pattern.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/pattern.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+components:
+  schemas:
+    info:
+      type: object
+      properties:
+        hostName:
+          type: string
+          format: hostname
+        arn:
+          type: string
+          pattern: '(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$'
+        tel:
+          type: string
+          pattern: '^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$'
+        comment:
+          type: string
+          pattern: '[^\b\f\n\r\t\\a+.?''"|()]+$'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/pattern_lookaround.yaml 0.34.0-1/tests/data/openapi/pattern_lookaround.yaml
--- 0.26.4-3/tests/data/openapi/pattern_lookaround.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/pattern_lookaround.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+components:
+  schemas:
+    info:
+      type: object
+      properties:
+        name:
+          type: string
+          pattern: '.*foo.*(?<!baz)bar.*'
diff -pruN 0.26.4-3/tests/data/openapi/query_parameters.yaml 0.34.0-1/tests/data/openapi/query_parameters.yaml
--- 0.26.4-3/tests/data/openapi/query_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/query_parameters.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,197 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets/{petId}:
+    get:
+      summary: Get a pet by ID
+      operationId: getPet
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The pet ID
+          schema:
+            type: string
+        - name: include
+          in: query
+          required: false
+          description: Include additional data
+          schema:
+            type: string
+      responses:
+        '200':
+          description: A pet
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+        - name: HomeAddress
+          in: query
+          required: false
+          schema:
+            default: 'Unknown'
+            type: string
+        - name: kind
+          in: query
+          required: false
+          schema:
+            default: dog
+            type: string
+        - in: query
+          name: filter
+
+          # Wrap 'schema' into 'content.<media-type>'
+          content:
+            application/json: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  color:
+                    type: string
+        - in: query
+          name: multipleMediaFilter
+
+          # Wrap 'schema' into 'content.<media-type>'
+          content:
+            application/xml: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  media_type:
+                    type: string
+                    enum:
+                      - xml
+                      - json
+                    default: xml
+            application/json: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  media_type:
+                    type: string
+                    enum:
+                      - xml
+                      - json
+                    default: json
+        - in: query
+          name: empty
+          content:
+            application/json:
+              {}
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Pet"
+        '500':
+          description: An internal error occurred
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    post:
+      summary: Create a pet
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                name:
+                  type: string
+                age:
+                  type: integer
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml 0.34.0-1/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml
--- 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+# ./common/cat.yaml
+CatInfo:
+  type: object
+  required:
+    - cat_id
+  properties:
+    cat_id:
+      type: string
+      description: ID of this cat
+    details:
+      $ref: "#/CatDetails"
+
+CatDetails:
+  type: object
+  required:
+    - name
+    - birthYear
+  properties:
+    name:
+      type: string
+      description: Name of this cat
+    birthYear:
+      type: number
+      description: Year of this cat's birth
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml 0.34.0-1/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml
--- 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,9 @@
+# ./public/entities.yaml
+openapi: 3.0.3
+info:
+  title: "Entity Schemas"
+paths: {}
+components:
+  schemas:
+    CatInfo:
+      $ref: "../common/cat.yaml#/CatInfo"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/reference_to_object_properties.yaml 0.34.0-1/tests/data/openapi/reference_to_object_properties.yaml
--- 0.26.4-3/tests/data/openapi/reference_to_object_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/reference_to_object_properties.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+openapi: "3.0.3"
+info:
+  title: Example
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    Id:
+      type: string
+    Parent:
+      type: object
+      properties:
+        id:
+          $ref: "#/components/schemas/Id"
+        name:
+          type: string
+        pet:
+          $ref: "#/components/schemas/Pet"
+    Child:
+      type: object
+      properties:
+        id:
+          $ref: "#/components/schemas/Id"
+        parent_id:
+          $ref: "#/components/schemas/Parent/properties/id"
+        name:
+          type: string
+        pet:
+          $ref: "#/components/schemas/Parent/properties/pet"
+    Pet:
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/referenced_default.yaml 0.34.0-1/tests/data/openapi/referenced_default.yaml
--- 0.26.4-3/tests/data/openapi/referenced_default.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/referenced_default.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,22 @@
+openapi: 3.1.0
+info:
+  title: Title
+  description: Title
+  version: 1.0.0
+components:
+  schemas:
+    Model:
+      type: "object"
+      properties:
+        settingA:
+          type: "number"
+          default: 5
+          minimum: 0
+          maximum: 10
+        settingB:
+          $ref: "#/components/schemas/ModelSettingB"
+    ModelSettingB:
+      type: "number"
+      default: 5
+      minimum: 0
+      maximum: 10
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/refs.yaml 0.34.0-1/tests/data/openapi/refs.yaml
--- 0.26.4-3/tests/data/openapi/refs.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/refs.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,5 @@
+openapi: 3.0.1
+components:
+  schemas:
+    Problem:
+      $ref: "https://teamdigitale.github.io/openapi/0.0.6/definitions.yaml#/schemas/Problem"
diff -pruN 0.26.4-3/tests/data/openapi/required_null.yaml 0.34.0-1/tests/data/openapi/required_null.yaml
--- 0.26.4-3/tests/data/openapi/required_null.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/required_null.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+      required: null
diff -pruN 0.26.4-3/tests/data/openapi/resolved_models.yaml 0.34.0-1/tests/data/openapi/resolved_models.yaml
--- 0.26.4-3/tests/data/openapi/resolved_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/resolved_models.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,82 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Resolved:
+      type: object
+      properties:
+        resolved:
+          type: array
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/root_model.yaml 0.34.0-1/tests/data/openapi/root_model.yaml
--- 0.26.4-3/tests/data/openapi/root_model.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/root_model.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+TestNested:
+  type: object
+  properties:
+    test_string:
+      type: string
+    nested_nested:
+      $ref: "#/TestNestedNested"
+TestNestedNested:
+  type: object
+  properties:
+    test_nested_nested_string:
+      type: string
+Test:
+  allOf:
+    - $ref: "#/TestNested"
diff -pruN 0.26.4-3/tests/data/openapi/same_name_objects.yaml 0.34.0-1/tests/data/openapi/same_name_objects.yaml
--- 0.26.4-3/tests/data/openapi/same_name_objects.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/same_name_objects.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,15 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+components:
+  schemas:
+    Pets:
+      type: object
+      additionalProperties: false
+    Friends1:
+      $ref: "resolved_models.yaml#/components/schemas/Pets"
+    Friends2:
+      $ref: "resolved_models.yaml#/components/schemas/Pets"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/shadowed_imports.yaml 0.34.0-1/tests/data/openapi/shadowed_imports.yaml
--- 0.26.4-3/tests/data/openapi/shadowed_imports.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/shadowed_imports.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,32 @@
+openapi: 3.0.0
+info:
+  title: REST API
+  version: 0.0.1
+servers:
+  - url: https://api.something.com/1
+components:
+  schemas:
+    marketingOptIn:
+      type: object
+      properties:
+        optedIn:
+          type: boolean
+          example: false
+        date:
+          type: string
+          format: date
+          example: '2018-04-26T17:03:25.155Z'
+paths:
+  /actions/:
+    get:
+      parameters:
+        - name: due
+          in: query
+          description: A due date for the card
+          required: false
+          schema:
+            type: string
+            format: date
+      responses:
+        '200':
+          description: Success
diff -pruN 0.26.4-3/tests/data/openapi/special_yaml_keywords.yaml 0.34.0-1/tests/data/openapi/special_yaml_keywords.yaml
--- 0.26.4-3/tests/data/openapi/special_yaml_keywords.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/special_yaml_keywords.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,44 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      responses:
+        '200':
+          description: A paged array of pets
+components:
+  schemas:
+    None:
+      type: object
+    "false":
+      type: object
+    "True":
+      type: object
+    "on":
+      type: object
+    NestedKeywords:
+      type: object
+      properties:
+        None:
+          $ref: "#/components/schemas/None"
+        "false":
+          $ref: "#/components/schemas/false"
+        "True":
+          $ref: "#/components/schemas/True"
+        "on":
+          $ref: "#/components/schemas/on"
+      required:
+        - None
+        - "false"
+        - "True"
+        - "on"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/subclass_enum.json 0.34.0-1/tests/data/openapi/subclass_enum.json
--- 0.26.4-3/tests/data/openapi/subclass_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/subclass_enum.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "openapi": "3.0.2",
+  "components": {
+    "schemas": {
+      "ProcessingStatus": {
+        "title": "ProcessingStatus",
+        "enum": [
+          "COMPLETED",
+          "PENDING",
+          "FAILED"
+        ],
+        "type": "string",
+        "description": "The processing status"
+      },
+      "ProcessingTask": {
+        "title": "ProcessingTask",
+        "type": "object",
+        "properties": {
+          "processing_status": {
+            "title": "Status of the task",
+            "allOf": [
+              {
+                "$ref": "#/components/schemas/ProcessingStatus"
+              }
+            ],
+            "default": "COMPLETED"
+          }
+        }
+      },
+    }
+  },
+  "info": {
+    "title": "",
+    "version": ""
+  },
+  "paths": {}
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/unsorted_optional_fields.yaml 0.34.0-1/tests/data/openapi/unsorted_optional_fields.yaml
--- 0.26.4-3/tests/data/openapi/unsorted_optional_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/unsorted_optional_fields.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: 3.0.3
+info:
+  title: Title
+  description: Title
+  version: 1.0.0
+servers:
+  - url: 'https'
+paths:
+components:
+  schemas:
+    Note:
+      type: object
+      required:
+        - text
+      properties:
+        author:
+          type: string
+        text:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/x_enum_varnames.yaml 0.34.0-1/tests/data/openapi/x_enum_varnames.yaml
--- 0.26.4-3/tests/data/openapi/x_enum_varnames.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/openapi/x_enum_varnames.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    string:
+      type: string
+      enum:
+        - 'dog'
+        - 'cat'
+        - 'snake'
+    unknown_type_string:
+      enum:
+        - 'dog'
+        - 'cat'
+        - 'snake'
+    named_string:
+      type: string
+      description: Operator to filter data by.
+      enum:
+        - '='
+        - '!='
+        - '>'
+        - '<'
+        - '>='
+        - '<='
+      x-enum-varnames:
+        - EQ
+        - NE
+        - GT
+        - LT
+        - GE
+        - LE
+      example: '>='
+      xml:
+        attribute: true
+    named_number:
+      type: number
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      x-enum-varnames:
+        - one
+        - two
+        - three
+      example: 1
+    number:
+      type: number
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      example: 1
+    unknown_type_number:
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      example: 1
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/project/pyproject.toml 0.34.0-1/tests/data/project/pyproject.toml
--- 0.26.4-3/tests/data/project/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/project/pyproject.toml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,16 @@
+[tool.black]
+skip-string-normalization = false
+line-length = 30
+
+[tool.datamodel-codegen]
+input = "INPUT_PATH"
+output = "OUTPUT_PATH"
+input_file_type = 'openapi'
+validation = true
+field-constraints = true
+snake-case-field = true
+strip-default-none = true
+target-python-version = "3.9"
+aliases = "ALIASES_PATH"
+extra-template-data = "EXTRA_TEMPLATE_DATA_PATH"
+custom-template-dir = "CUSTOM_TEMPLATE_DIR_PATH"
diff -pruN 0.26.4-3/tests/data/pyproject.toml 0.34.0-1/tests/data/pyproject.toml
--- 0.26.4-3/tests/data/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/pyproject.toml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,10 @@
+[tool.isort]
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+line_length = 88
+skip = "tests/data"
+
+sections = [ 'FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY', 'LOCALFOLDER' ]
+known_first_party = [ 'custom_module' ]
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/add_comment.py 0.34.0-1/tests/data/python/custom_formatters/add_comment.py
--- 0.26.4-3/tests/data/python/custom_formatters/add_comment.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/custom_formatters/add_comment.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class CodeFormatter(CustomCodeFormatter):
+    """Simple correct formatter. Adding a comment to top of code."""
+    def apply(self, code: str) -> str:
+        return f'# a comment\n{code}'
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/add_license.py 0.34.0-1/tests/data/python/custom_formatters/add_license.py
--- 0.26.4-3/tests/data/python/custom_formatters/add_license.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/custom_formatters/add_license.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+from typing import Any, Dict
+from pathlib import Path
+
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class CodeFormatter(CustomCodeFormatter):
+    """Add a license to file from license file path."""
+
+    def __init__(self, formatter_kwargs: Dict[str, Any]) -> None:
+        super().__init__(formatter_kwargs)
+
+        if "license_file" not in formatter_kwargs:
+            raise ValueError()
+
+        license_file_path = Path(formatter_kwargs["license_file"]).resolve()
+
+        with license_file_path.open("r") as f:
+            license_file = f.read()
+
+        self.license_header = "\n".join([f"# {line}".strip() for line in license_file.split("\n")])
+
+    def apply(self, code: str) -> str:
+        return f"{self.license_header}\n{code}"
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/license_example.txt 0.34.0-1/tests/data/python/custom_formatters/license_example.txt
--- 0.26.4-3/tests/data/python/custom_formatters/license_example.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/custom_formatters/license_example.txt	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+MIT License
+
+Copyright (c) 2023 Blah-blah
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/not_subclass.py 0.34.0-1/tests/data/python/custom_formatters/not_subclass.py
--- 0.26.4-3/tests/data/python/custom_formatters/not_subclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/custom_formatters/not_subclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+class CodeFormatter:
+    """Invalid formatter: is not subclass of `datamodel_code_generator.format.CustomCodeFormatter`."""
+    pass
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/wrong.py 0.34.0-1/tests/data/python/custom_formatters/wrong.py
--- 0.26.4-3/tests/data/python/custom_formatters/wrong.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/custom_formatters/wrong.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,7 @@
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class WrongFormatterName(CustomCodeFormatter):
+    """Invalid formatter: correct name is CodeFormatter."""
+    def apply(self, code: str) -> str:
+        return f'# a comment\n{code}'
diff -pruN 0.26.4-3/tests/data/python/space_and_special_characters_dict.py 0.34.0-1/tests/data/python/space_and_special_characters_dict.py
--- 0.26.4-3/tests/data/python/space_and_special_characters_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/python/space_and_special_characters_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "Serial Number": "A12345678",
+    "Timestamp": "2020-05-26T12:15:25.792741Z",
+    "Data": {
+        "Length (m)": 12.34,
+        "Symmetric deviation (%)": 12.216564148290807,
+        "Total running time (s)": 974,
+        "Mass (kg)": 42.23,
+        "Initial parameters": {
+            "V1": 123,
+            "V2": 456
+        },
+        "class": "Unknown"
+    },
+    "values": {
+        "1 Step": "42",
+        "2 Step": "23"
+    },
+    "recursive": {
+        "sub": {
+            "recursive": {
+                "value": 42.23
+            }
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/templates/pydantic/BaseModel.jinja2 0.34.0-1/tests/data/templates/pydantic/BaseModel.jinja2
--- 0.26.4-3/tests/data/templates/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/templates/pydantic/BaseModel.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.default }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/tests/data/templates_old_style/BaseModel.jinja2 0.34.0-1/tests/data/templates_old_style/BaseModel.jinja2
--- 0.26.4-3/tests/data/templates_old_style/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/templates_old_style/BaseModel.jinja2	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,19 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.default }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/tests/data/yaml/pet.yaml 0.34.0-1/tests/data/yaml/pet.yaml
--- 0.26.4-3/tests/data/yaml/pet.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/data/yaml/pet.yaml	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3 @@
+Pet:
+  name: cat
+  age: 3
\ No newline at end of file
diff -pruN 0.26.4-3/tests/main/graphql/test_annotated.py 0.34.0-1/tests/main/graphql/test_annotated.py
--- 0.26.4-3/tests/main/graphql/test_annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/graphql/test_annotated.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,128 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from typing import TYPE_CHECKING
+
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+GRAPHQL_DATA_PATH: Path = DATA_PATH / "graphql"
+EXPECTED_GRAPHQL_PATH: Path = EXPECTED_MAIN_PATH / "graphql"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@freeze_time("2019-07-26")
+def test_annotated(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "annotated.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "annotated.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_annotated_use_standard_collections(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "annotated.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--use-standard-collections",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_GRAPHQL_PATH / "annotated_use_standard_collections.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_annotated_use_standard_collections_use_union_operator(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "annotated.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--use-standard-collections",
+        "--use-union-operator",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_GRAPHQL_PATH / "annotated_use_standard_collections_use_union_operator.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_annotated_use_union_operator(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "annotated.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--use-union-operator",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_GRAPHQL_PATH / "annotated_use_union_operator.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_annotated_field_aliases(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "field-aliases.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--aliases",
+        str(GRAPHQL_DATA_PATH / "field-aliases.json"),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "annotated_field_aliases.py").read_text()
diff -pruN 0.26.4-3/tests/main/graphql/test_main_graphql.py 0.34.0-1/tests/main/graphql/test_main_graphql.py
--- 0.26.4-3/tests/main/graphql/test_main_graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/graphql/test_main_graphql.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,316 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from typing import TYPE_CHECKING
+
+import black
+import isort
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+GRAPHQL_DATA_PATH: Path = DATA_PATH / "graphql"
+EXPECTED_GRAPHQL_PATH: Path = EXPECTED_MAIN_PATH / "graphql"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "simple_star_wars.py",
+        ),
+        (
+            "dataclasses.dataclass",
+            "simple_star_wars_dataclass.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_simple_star_wars(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "simple-star-wars.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_different_types_of_fields(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "different-types-of-fields.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "different_types_of_fields.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_use_default_kwarg(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "annotated.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--use-default-kwarg",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_GRAPHQL_PATH / "annotated_use_default_kwarg.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_custom_scalar_types(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "custom-scalar-types.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--extra-template-data",
+        str(GRAPHQL_DATA_PATH / "custom-scalar-types.json"),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "custom_scalar_types.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_field_aliases(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "field-aliases.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--aliases",
+        str(GRAPHQL_DATA_PATH / "field-aliases.json"),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "field_aliases.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_enums(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "enums.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "enums.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_union(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "union.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "union.py").read_text()
+
+
+@pytest.mark.skipif(
+    not isort.__version__.startswith("4."),
+    reason="See https://github.com/PyCQA/isort/issues/1600 for example",
+)
+@freeze_time("2019-07-26")
+def test_main_graphql_additional_imports_isort_4(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "additional-imports.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--extra-template-data",
+        str(GRAPHQL_DATA_PATH / "additional-imports-types.json"),
+        "--additional-imports",
+        "datetime.datetime,datetime.date,mymodule.myclass.MyCustomPythonClass",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "additional_imports_isort4.py").read_text()
+    )
+
+
+@pytest.mark.skipif(
+    isort.__version__.startswith("4."),
+    reason="See https://github.com/PyCQA/isort/issues/1600 for example",
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_additional_imports_isort_5_or_6(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "additional-imports.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--extra-template-data",
+        str(GRAPHQL_DATA_PATH / "additional-imports-types.json"),
+        "--additional-imports",
+        "datetime.datetime,datetime.date,mymodule.myclass.MyCustomPythonClass",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "additional_imports_isort5.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_custom_formatters(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "custom-scalar-types.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--custom-formatters",
+        "tests.data.python.custom_formatters.add_comment",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "custom_formatters.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_use_standard_collections(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "use-standard-collections.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--use-standard-collections",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "use_standard_collections.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_use_union_operator(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "use-union-operator.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--use-union-operator",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "use_union_operator.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_graphql_extra_fields_allow(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "simple-star-wars.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--extra-fields",
+        "allow",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_GRAPHQL_PATH / "simple_star_wars_extra_fields_allow.py").read_text()
+    )
diff -pruN 0.26.4-3/tests/main/jsonschema/test_main_jsonschema.py 0.34.0-1/tests/main/jsonschema/test_main_jsonschema.py
--- 0.26.4-3/tests/main/jsonschema/test_main_jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/jsonschema/test_main_jsonschema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,3322 @@
+from __future__ import annotations
+
+import contextlib
+import json
+import shutil
+from argparse import Namespace
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import call
+
+import black
+import isort
+import pytest
+from freezegun import freeze_time
+from packaging import version
+
+from datamodel_code_generator import (
+    MIN_VERSION,
+    DataModelType,
+    InputFileType,
+    PythonVersionMin,
+    chdir,
+    generate,
+)
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH, TIMESTAMP
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+with contextlib.suppress(ImportError):
+    pass
+
+
+FixtureRequest = pytest.FixtureRequest
+
+
+JSON_SCHEMA_DATA_PATH: Path = DATA_PATH / "jsonschema"
+EXPECTED_JSON_SCHEMA_PATH: Path = EXPECTED_MAIN_PATH / "jsonschema"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_inheritance_forward_ref(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    shutil.copy(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "inheritance_forward_ref.json"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "inheritance_forward_ref.py").read_text()
+    )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_inheritance_forward_ref_keep_model_order(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    shutil.copy(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "inheritance_forward_ref.json"),
+        "--output",
+        str(output_file),
+        "--keep-model-order",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "inheritance_forward_ref_keep_model_order.py").read_text()
+    )
+
+
+@pytest.mark.skip(reason="pytest-xdist does not support the test")
+@freeze_time("2019-07-26")
+def test_main_without_arguments() -> None:
+    with pytest.raises(SystemExit):
+        main()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_autodetect(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "person.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "auto",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "autodetect.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_autodetect_failed(tmp_path: Path) -> None:
+    input_file: Path = tmp_path / "input.yaml"
+    output_file: Path = tmp_path / "output.py"
+
+    input_file.write_text(":", encoding="utf-8")
+
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "auto",
+    ])
+    assert return_code == Exit.ERROR
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "person.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "general.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_nested_deep(tmp_path: Path) -> None:
+    output_init_file: Path = tmp_path / "__init__.py"
+    output_nested_file: Path = tmp_path / "nested/deep.py"
+    output_empty_parent_nested_file: Path = tmp_path / "empty_parent/nested/deep.py"
+
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "nested_person.json"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_init_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "__init__.py").read_text()
+    )
+
+    assert (
+        output_nested_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "nested" / "deep.py").read_text()
+    )
+    assert (
+        output_empty_parent_nested_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "empty_parent" / "nested" / "deep.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_nested_skip(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "nested_skip.json"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    nested_skip_dir = EXPECTED_JSON_SCHEMA_PATH / "nested_skip"
+    for path in nested_skip_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(nested_skip_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_external_files(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_parent_root.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "external_files.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_collapsed_external_references(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_reference"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert (tmp_path / "ref0.py").read_text() == (EXPECTED_JSON_SCHEMA_PATH / "external_ref0.py").read_text()
+    assert (tmp_path / "other/ref2.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / "external_other_ref2.py"
+    ).read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_multiple_files(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "multiple_files"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "multiple_files"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_no_empty_collapsed_external_model(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_collapse"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert not (tmp_path / "child.py").exists()
+    assert (tmp_path / "__init__.py").exists()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "null_and_array.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "null_and_array_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_null_and_array(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "null_and_array.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_use_default_pydantic_v2_with_json_schema_const(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "use_default_with_const.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-default",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "use_default_with_const.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "option"),
+    [
+        (
+            "pydantic.BaseModel",
+            "complicated_enum_default_member.py",
+            "--set-default-enum-member",
+        ),
+        (
+            "dataclasses.dataclass",
+            "complicated_enum_default_member_dataclass.py",
+            "--set-default-enum-member",
+        ),
+        (
+            "dataclasses.dataclass",
+            "complicated_enum_default_member_dataclass.py",
+            None,
+        ),
+    ],
+)
+def test_main_complicated_enum_default_member(
+    output_model: str, expected_output: str, option: str | None, tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        a
+        for a in [
+            "--input",
+            str(JSON_SCHEMA_DATA_PATH / "complicated_enum.json"),
+            "--output",
+            str(output_file),
+            option,
+            "--output-model",
+            output_model,
+        ]
+        if a
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_json_reuse_enum_default_member(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "duplicate_enum.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--reuse-model",
+        "--set-default-enum-member",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "json_reuse_enum_default_member.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_model_name_failed(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_model_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--class-name",
+        "with",
+    ])
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert captured.err == "title='with' is invalid class name. You have to set `--class-name` option\n"
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_model_name_converted(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_model_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert captured.err == "title='1Xyz' is invalid class name. You have to set `--class-name` option\n"
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_model_name(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_model_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--class-name",
+        "ValidModelName",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "invalid_model_name.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_with_local_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    root_id_response = mocker.Mock()
+    root_id_response.text = "dummy"
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_id.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id.py").read_text()
+    httpx_get_mock.assert_not_called()
+
+
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_with_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    root_id_response = mocker.Mock()
+    root_id_response.text = "dummy"
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    input_file = tmp_path / "root_id.json"
+    shutil.copy(JSON_SCHEMA_DATA_PATH / "root_id.json", input_file)
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id.py").read_text()
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_self_refs_with_local_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_id_self_ref.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id.py").read_text().replace(
+        "filename:  root_id.json", "filename:  root_id_self_ref.json"
+    )
+    httpx_get_mock.assert_not_called()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_self_refs_with_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    input_file = tmp_path / "root_id_self_ref.json"
+    shutil.copy(JSON_SCHEMA_DATA_PATH / "root_id_self_ref.json", input_file)
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id.py").read_text().replace(
+        "filename:  root_id.json", "filename:  root_id_self_ref.json"
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_with_absolute_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    root_id_response = mocker.Mock()
+    root_id_response.text = "dummy"
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    input_file = tmp_path / "root_id_absolute_url.json"
+    shutil.copy(JSON_SCHEMA_DATA_PATH / "root_id_absolute_url.json", input_file)
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id_absolute_url.py").read_text()
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+def test_main_root_id_jsonschema_with_absolute_local_file(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_id_absolute_url.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "root_id_absolute_url.py").read_text()
+    )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_id(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "id.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "id.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_id_as_stdin(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    monkeypatch.setattr("sys.stdin", (JSON_SCHEMA_DATA_PATH / "id.json").open())
+    return_code: Exit = main([
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "id_stdin.py").read_text()
+
+
+def test_main_jsonschema_ids(tmp_path: Path) -> None:
+    input_filename = JSON_SCHEMA_DATA_PATH / "ids" / "Organization.schema.json"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--input-file-type",
+            "jsonschema",
+        ])
+    main_jsonschema_ids_dir = EXPECTED_JSON_SCHEMA_PATH / "ids"
+    for path in main_jsonschema_ids_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_jsonschema_ids_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_external_definitions(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_definitions_root.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "external_definitions.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_external_files_in_directory(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_files_in_directory" / "person.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "external_files_in_directory.py").read_text()
+    )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_nested_directory(tmp_path: Path) -> None:
+    output_path = tmp_path / "model"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "external_files_in_directory"),
+        "--output",
+        str(output_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    main_nested_directory = EXPECTED_JSON_SCHEMA_PATH / "nested_directory"
+
+    for path in main_nested_directory.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_nested_directory)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_circular_reference(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "circular_reference.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "circular_reference.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_enum_name(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_enum_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "invalid_enum_name.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_enum_name_snake_case_field(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_enum_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--snake-case-field",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "invalid_enum_name_snake_case_field.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_json_reuse_enum(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "duplicate_enum.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--reuse-model",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "json_reuse_enum.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_json_capitalise_enum_members(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "many_case_enum.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--capitalise-enum-members",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "json_capitalise_enum_members.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_json_capitalise_enum_members_without_enum(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "person.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--capitalise-enum-members",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "autodetect.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_similar_nested_array(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "similar_nested_array.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "similar_nested_array.py").read_text()
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_pydantic_v2",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_require_referenced_field(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "require_referenced_field/"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--output-datetime-class",
+        "AwareDatetime",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+
+    assert (tmp_path / "referenced.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "referenced.py"
+    ).read_text()
+    assert (tmp_path / "required.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "required.py"
+    ).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_naivedatetime",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_require_referenced_field_naive_datetime(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "require_referenced_field/"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--output-datetime-class",
+        "NaiveDatetime",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+
+    assert (tmp_path / "referenced.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "referenced.py"
+    ).read_text()
+    assert (tmp_path / "required.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "required.py"
+    ).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_pydantic_v2",
+        ),
+        (
+            "msgspec.Struct",
+            "require_referenced_field_msgspec",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_require_referenced_field_datetime(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "require_referenced_field/"),
+        "--output",
+        str(object=tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+
+    assert (tmp_path / "referenced.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "referenced.py"
+    ).read_text()
+    assert (tmp_path / "required.py").read_text() == (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output / "required.py"
+    ).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_json_pointer(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "json_pointer.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "json_pointer.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_nested_json_pointer(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "nested_json_pointer.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "nested_json_pointer.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_multiple_files_json_pointer(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "multiple_files_json_pointer"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "multiple_files_json_pointer"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_root_model_with_additional_properties(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "root_model_with_additional_properties.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_root_model_with_additional_properties_use_generic_container_types(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--use-generic-container-types",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (
+            EXPECTED_JSON_SCHEMA_PATH / "root_model_with_additional_properties_use_generic_container_types.py"
+        ).read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_root_model_with_additional_properties_use_standard_collections(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--use-standard-collections",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "root_model_with_additional_properties_use_standard_collections.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_root_model_with_additional_properties_literal(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--enum-field-as-literal",
+        "all",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "root_model_with_additional_properties_literal.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_multiple_files_ref(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "multiple_files_self_ref"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "multiple_files_self_ref"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_multiple_files_ref_test_json(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_SCHEMA_DATA_PATH / "multiple_files_self_ref"):
+        return_code: Exit = main([
+            "--input",
+            "test.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+        ])
+        assert return_code == Exit.OK
+        assert (
+            output_file.read_text(encoding="utf-8")
+            == (EXPECTED_JSON_SCHEMA_PATH / "multiple_files_self_ref_single.py").read_text()
+        )
+
+
+@freeze_time("2019-07-26")
+def test_main_space_field_enum_snake_case_field(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_SCHEMA_DATA_PATH / "space_field_enum.json"):
+        return_code: Exit = main([
+            "--input",
+            "space_field_enum.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+            "--snake-case-field",
+            "--original-field-name-delimiter",
+            " ",
+        ])
+        assert return_code == Exit.OK
+        assert (
+            output_file.read_text(encoding="utf-8")
+            == (EXPECTED_JSON_SCHEMA_PATH / "space_field_enum_snake_case_field.py").read_text()
+        )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_all_of_ref(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_SCHEMA_DATA_PATH / "all_of_ref"):
+        return_code: Exit = main([
+            "--input",
+            "test.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+            "--class-name",
+            "Test",
+        ])
+        assert return_code == Exit.OK
+        assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "all_of_ref.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_all_of_with_object(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        return_code: Exit = main([
+            "--input",
+            "all_of_with_object.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+        ])
+        assert return_code == Exit.OK
+        assert (
+            output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "all_of_with_object.py").read_text()
+        )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_main_combined_array(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        return_code: Exit = main([
+            "--input",
+            "combined_array.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+        ])
+        assert return_code == Exit.OK
+        assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "combined_array.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_pattern(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "pattern.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "pattern.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_generate(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "person.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+    )
+
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "general.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_generate_non_pydantic_output(tmp_path: Path) -> None:
+    """
+    See https://github.com/koxudaxi/datamodel-code-generator/issues/1452.
+    """
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "simple_string.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.DataclassesDataclass,
+    )
+
+    file = EXPECTED_JSON_SCHEMA_PATH / "generate_non_pydantic_output.py"
+    assert output_file.read_text(encoding="utf-8") == file.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_generate_from_directory(tmp_path: Path) -> None:
+    input_ = (JSON_SCHEMA_DATA_PATH / "external_files_in_directory").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    assert input_.is_dir()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=tmp_path,
+    )
+
+    main_nested_directory = EXPECTED_JSON_SCHEMA_PATH / "nested_directory"
+
+    for path in main_nested_directory.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_nested_directory)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_generate_custom_class_name_generator(tmp_path: Path) -> None:
+    def custom_class_name_generator(title: str) -> str:
+        return f"Custom{title}"
+
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "person.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        custom_class_name_generator=custom_class_name_generator,
+    )
+
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "general.py").read_text().replace(
+        "Person", "CustomPerson"
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_generate_custom_class_name_generator_additional_properties(tmp_path: Path) -> None:
+    output_file = tmp_path / "models.py"
+
+    def custom_class_name_generator(name: str) -> str:
+        return f"Custom{name[0].upper() + name[1:]}"
+
+    input_ = (JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        custom_class_name_generator=custom_class_name_generator,
+    )
+
+    assert (
+        output_file.read_text()
+        == (EXPECTED_JSON_SCHEMA_PATH / "root_model_with_additional_properties_custom_class_name.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_http_jsonschema(mocker: MockerFixture, tmp_path: Path) -> None:
+    external_directory = JSON_SCHEMA_DATA_PATH / "external_files_in_directory"
+
+    def get_mock_response(path: str) -> mocker.Mock:
+        mock = mocker.Mock()
+        mock.text = (external_directory / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("person.json"),
+            get_mock_response("definitions/relative/animal/pet/pet.json"),
+            get_mock_response("definitions/relative/animal/fur.json"),
+            get_mock_response("definitions/friends.json"),
+            get_mock_response("definitions/food.json"),
+            get_mock_response("definitions/machine/robot.json"),
+            get_mock_response("definitions/drink/coffee.json"),
+            get_mock_response("definitions/drink/tea.json"),
+        ],
+    )
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--url",
+        "https://example.com/external_files_in_directory/person.json",
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (
+        EXPECTED_JSON_SCHEMA_PATH / "external_files_in_directory.py"
+    ).read_text().replace(
+        "#   filename:  person.json",
+        "#   filename:  https://example.com/external_files_in_directory/person.json",
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/external_files_in_directory/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/friends.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/food.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/drink/tea.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.parametrize(
+    (
+        "headers_arguments",
+        "headers_requests",
+        "query_parameters_arguments",
+        "query_parameters_requests",
+        "http_ignore_tls",
+    ),
+    [
+        (
+            ("Authorization: Basic dXNlcjpwYXNz",),
+            [("Authorization", "Basic dXNlcjpwYXNz")],
+            ("key=value",),
+            [("key", "value")],
+            False,
+        ),
+        (
+            ("Authorization: Basic dXNlcjpwYXNz", "X-API-key: abcefg"),
+            [("Authorization", "Basic dXNlcjpwYXNz"), ("X-API-key", "abcefg")],
+            ("key=value", "newkey=newvalue"),
+            [("key", "value"), ("newkey", "newvalue")],
+            True,
+        ),
+    ],
+)
+def test_main_http_jsonschema_with_http_headers_and_http_query_parameters_and_ignore_tls(
+    mocker: MockerFixture,
+    headers_arguments: tuple[str, str],
+    headers_requests: list[tuple[str, str]],
+    query_parameters_arguments: tuple[str, ...],
+    query_parameters_requests: list[tuple[str, str]],
+    http_ignore_tls: bool,
+    tmp_path: Path,
+) -> None:
+    external_directory = JSON_SCHEMA_DATA_PATH / "external_files_in_directory"
+
+    def get_mock_response(path: str) -> mocker.Mock:
+        mock = mocker.Mock()
+        mock.text = (external_directory / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("person.json"),
+            get_mock_response("definitions/relative/animal/pet/pet.json"),
+            get_mock_response("definitions/relative/animal/fur.json"),
+            get_mock_response("definitions/friends.json"),
+            get_mock_response("definitions/food.json"),
+            get_mock_response("definitions/machine/robot.json"),
+            get_mock_response("definitions/drink/coffee.json"),
+            get_mock_response("definitions/drink/tea.json"),
+        ],
+    )
+    output_file: Path = tmp_path / "output.py"
+    args = [
+        "--url",
+        "https://example.com/external_files_in_directory/person.json",
+        "--http-headers",
+        *headers_arguments,
+        "--http-query-parameters",
+        *query_parameters_arguments,
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ]
+    if http_ignore_tls:
+        args.append("--http-ignore-tls")
+
+    return_code: Exit = main(args)
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (
+        EXPECTED_JSON_SCHEMA_PATH / "external_files_in_directory.py"
+    ).read_text().replace(
+        "#   filename:  person.json",
+        "#   filename:  https://example.com/external_files_in_directory/person.json",
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/external_files_in_directory/person.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/friends.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/food.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+        call(
+            "https://example.com/external_files_in_directory/definitions/drink/tea.json",
+            headers=headers_requests,
+            verify=bool(not http_ignore_tls),
+            follow_redirects=True,
+            params=query_parameters_requests,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+def test_main_self_reference(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "self_reference.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "self_reference.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_strict_types(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "strict_types.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "strict_types.py").read_text()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_main_strict_types_all(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "strict_types.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--strict-types",
+        "str",
+        "bytes",
+        "int",
+        "float",
+        "bool",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "strict_types_all.py").read_text()
+
+
@pytest.mark.benchmark
@freeze_time("2019-07-26")
def test_main_strict_types_all_with_field_constraints(tmp_path: Path) -> None:
    """--strict-types combined with --field-constraints matches the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "strict_types.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--strict-types", "str", "bytes", "int", "float", "bool",
        "--field-constraints",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "strict_types_all_field_constraints.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_special_enum(tmp_path: Path) -> None:
    """special_enum.json generates the expected enum model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_enum.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_enum.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_special_enum_special_field_name_prefix(tmp_path: Path) -> None:
    """--special-field-name-prefix renames special enum members with the given prefix."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_enum.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--special-field-name-prefix", "special",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_enum_special_field_name_prefix.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_special_enum_special_field_name_prefix_keep_private(tmp_path: Path) -> None:
    """An empty --special-field-name-prefix keeps the private-looking member names."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_enum.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--special-field-name-prefix", "",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_enum_special_field_name_prefix_keep_private.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_special_model_remove_special_field_name_prefix(tmp_path: Path) -> None:
    """--remove-special-field-name-prefix strips the special prefix from field names."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_prefix_model.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--remove-special-field-name-prefix",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_model_remove_special_field_name_prefix.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_subclass_enum(tmp_path: Path) -> None:
    """--use-subclass-enum generates typed enum subclasses."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "subclass_enum.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--use-subclass-enum",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "subclass_enum.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_special_enum_empty_enum_field_name(tmp_path: Path) -> None:
    """--empty-enum-field-name supplies a name for members that would be empty."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_enum.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--empty-enum-field-name", "empty",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_enum_empty_enum_field_name.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.benchmark
@freeze_time("2019-07-26")
def test_main_jsonschema_special_field_name(tmp_path: Path) -> None:
    """special_field_name.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_field_name.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "special_field_name.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_complex_one_of(tmp_path: Path) -> None:
    """complex_one_of.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "complex_one_of.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "complex_one_of.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.benchmark
@freeze_time("2019-07-26")
def test_main_jsonschema_complex_any_of(tmp_path: Path) -> None:
    """complex_any_of.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "complex_any_of.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "complex_any_of.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_combine_one_of_object(tmp_path: Path) -> None:
    """oneOf object combination generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "combine_one_of_object.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "combine_one_of_object.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.skipif(
    black.__version__.split(".")[0] == "19",
    reason="Installed black doesn't support the old style",
)
@pytest.mark.parametrize(
    ("union_mode", "output_model", "expected_output"),
    [
        (None, "pydantic.BaseModel", "combine_any_of_object.py"),
        (None, "pydantic_v2.BaseModel", "combine_any_of_object_v2.py"),
        (
            "left_to_right",
            "pydantic_v2.BaseModel",
            "combine_any_of_object_left_to_right.py",
        ),
    ],
)
@freeze_time("2019-07-26")
def test_main_jsonschema_combine_any_of_object(
    union_mode: str | None, output_model: str, expected_output: str, tmp_path: Path
) -> None:
    """anyOf object combination honours the model type and optional union mode."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "combine_any_of_object.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--output-model", output_model,
    ]
    if union_mode is not None:
        args += ["--union-mode", union_mode]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.benchmark
@freeze_time("2019-07-26")
def test_main_jsonschema_field_include_all_keys(tmp_path: Path) -> None:
    """--field-include-all-keys on person.json still matches the general golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "person.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--field-include-all-keys",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "general.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
@pytest.mark.parametrize(
    ("output_model", "expected_output"),
    [
        (
            "pydantic.BaseModel",
            "field_extras_field_include_all_keys.py",
        ),
        (
            "pydantic_v2.BaseModel",
            "field_extras_field_include_all_keys_v2.py",
        ),
    ],
)
def test_main_jsonschema_field_extras_field_include_all_keys(
    output_model: str, expected_output: str, tmp_path: Path
) -> None:
    """--field-include-all-keys with an x-prefix exception matches the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "extras.json"),
        "--output", str(output_path),
        "--output-model", output_model,
        "--input-file-type", "jsonschema",
        "--field-include-all-keys",
        "--field-extra-keys-without-x-prefix", "x-repr",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
@pytest.mark.parametrize(
    ("output_model", "expected_output"),
    [
        (
            "pydantic.BaseModel",
            "field_extras_field_extra_keys.py",
        ),
        (
            "pydantic_v2.BaseModel",
            "field_extras_field_extra_keys_v2.py",
        ),
    ],
)
def test_main_jsonschema_field_extras_field_extra_keys(output_model: str, expected_output: str, tmp_path: Path) -> None:
    """--field-extra-keys picks selected extras (unknown keys are tolerated)."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "extras.json"),
        "--output", str(output_path),
        "--output-model", output_model,
        "--input-file-type", "jsonschema",
        "--field-extra-keys", "key2", "invalid-key-1",
        "--field-extra-keys-without-x-prefix", "x-repr",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
@pytest.mark.parametrize(
    ("output_model", "expected_output"),
    [
        (
            "pydantic.BaseModel",
            "field_extras.py",
        ),
        (
            "pydantic_v2.BaseModel",
            "field_extras_v2.py",
        ),
    ],
)
def test_main_jsonschema_field_extras(output_model: str, expected_output: str, tmp_path: Path) -> None:
    """extras.json generates the expected model for both pydantic versions."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "extras.json"),
        "--output", str(output_path),
        "--output-model", output_model,
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.skipif(
    not isort.__version__.startswith("4."),
    reason="isort 5.x don't sort pydantic modules",
)
@pytest.mark.parametrize(
    ("output_model", "expected_output"),
    [
        (
            "pydantic.BaseModel",
            "custom_type_path.py",
        ),
        (
            "pydantic_v2.BaseModel",
            "custom_type_path_pydantic_v2.py",
        ),
    ],
)
@freeze_time("2019-07-26")
def test_main_jsonschema_custom_type_path(output_model: str, expected_output: str, tmp_path: Path) -> None:
    """Custom dotted type paths are imported and used in the generated model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "custom_type_path.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--output-model", output_model,
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_custom_base_path(tmp_path: Path) -> None:
    """custom_base_path.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "custom_base_path.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "custom_base_path.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_long_description(tmp_path: Path) -> None:
    """A long description field is rendered as in the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "long_description.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "long_description.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
@pytest.mark.skipif(
    black.__version__.split(".")[0] == "19",
    reason="Installed black doesn't support the old style",
)
def test_long_description_wrap_string_literal(tmp_path: Path) -> None:
    """--wrap-string-literal wraps a long description across lines."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "long_description.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--wrap-string-literal",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "long_description_wrap_string_literal.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
def test_version(capsys: pytest.CaptureFixture) -> None:
    """--version prints a real version to stdout and exits via SystemExit(Exit.OK)."""
    with pytest.raises(SystemExit) as excinfo:
        main(["--version"])
    assert excinfo.value.code == Exit.OK
    out, err = capsys.readouterr()
    # A placeholder version would indicate broken package metadata.
    assert out != "0.0.0\n"
    assert not err
+
+
@freeze_time("2019-07-26")
def test_jsonschema_pattern_properties(tmp_path: Path) -> None:
    """patternProperties generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "pattern_properties.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "pattern_properties.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_jsonschema_pattern_properties_field_constraints(tmp_path: Path) -> None:
    """patternProperties with --field-constraints matches the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "pattern_properties.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--field-constraints",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "pattern_properties_field_constraints.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_jsonschema_titles(tmp_path: Path) -> None:
    """titles.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "titles.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "titles.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_jsonschema_titles_use_title_as_name(tmp_path: Path) -> None:
    """--use-title-as-name names models after their schema titles."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "titles.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--use-title-as-name",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "titles_use_title_as_name.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_jsonschema_without_titles_use_title_as_name(tmp_path: Path) -> None:
    """--use-title-as-name falls back gracefully when schemas lack titles."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "without_titles.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--use-title-as-name",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "without_titles_use_title_as_name.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_has_default_value(tmp_path: Path) -> None:
    """Schema defaults are carried into the generated model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "has_default_value.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "has_default_value.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_boolean_property(tmp_path: Path) -> None:
    """boolean_property.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "boolean_property.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "boolean_property.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_modular_default_enum_member(
    tmp_path: Path,
) -> None:
    """--set-default-enum-member on a modular schema matches the golden tree.

    Unlike its siblings, this test previously ignored `main`'s return code;
    the exit status is now asserted so generator failures are not masked by
    stale output files.
    """
    input_filename = JSON_SCHEMA_DATA_PATH / "modular_default_enum_member"
    output_path = tmp_path / "model"

    with freeze_time(TIMESTAMP):
        return_code: Exit = main([
            "--input",
            str(input_filename),
            "--output",
            str(output_path),
            "--set-default-enum-member",
        ])
    assert return_code == Exit.OK
    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "modular_default_enum_member"
    for path in main_modular_dir.rglob("*.py"):
        result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text()
        assert result == path.read_text()
+
+
@pytest.mark.skipif(
    # Compare the major version numerically: a lexicographic comparison
    # (e.g. "9" < "22" is False) misfires for single- or triple-digit majors.
    int(black.__version__.split(".")[0]) < 22,
    reason="Installed black doesn't support Python version 3.10",
)
@freeze_time("2019-07-26")
def test_main_use_union_operator(tmp_path: Path) -> None:
    """--use-union-operator output for a schema directory matches the golden tree."""
    output_path = tmp_path / "model"
    return_code: Exit = main([
        "--input",
        str(JSON_SCHEMA_DATA_PATH / "external_files_in_directory"),
        "--output",
        str(output_path),
        "--input-file-type",
        "jsonschema",
        "--use-union-operator",
    ])
    assert return_code == Exit.OK
    main_nested_directory = EXPECTED_JSON_SCHEMA_PATH / "use_union_operator"

    for path in main_nested_directory.rglob("*.py"):
        result = output_path.joinpath(path.relative_to(main_nested_directory)).read_text()
        assert result == path.read_text()
+
+
@freeze_time("2019-07-26")
@pytest.mark.parametrize("as_module", [True, False])
def test_treat_dot_as_module(as_module: bool, tmp_path: Path) -> None:
    """Dotted file names become nested modules only under --treat-dot-as-module.

    The two branches previously duplicated the whole argument list; the base
    arguments are now built once and the flag appended conditionally.
    """
    args = [
        "--input",
        str(JSON_SCHEMA_DATA_PATH / "treat_dot_as_module"),
        "--output",
        str(tmp_path),
    ]
    if as_module:
        args.append("--treat-dot-as-module")
    return_code: Exit = main(args)
    assert return_code == Exit.OK
    path_extension = "treat_dot_as_module" if as_module else "treat_dot_not_as_module"
    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / path_extension
    for path in main_modular_dir.rglob("*.py"):
        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
        if as_module:
            # With the flag, only the ".py" suffix dot should remain in the name.
            assert str(path.relative_to(main_modular_dir)).count(".") == 1
        assert result == path.read_text()
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_duplicate_name(tmp_path: Path) -> None:
    """Duplicate model names across files are disambiguated as in the golden tree."""
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "duplicate_name"),
        "--output", str(tmp_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected_dir = EXPECTED_JSON_SCHEMA_PATH / "duplicate_name"
    for expected in expected_dir.rglob("*.py"):
        generated = tmp_path / expected.relative_to(expected_dir)
        assert generated.read_text() == expected.read_text()
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_items_boolean(tmp_path: Path) -> None:
    """Boolean `items` schemas generate the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "items_boolean.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "items_boolean.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_array_in_additional_properites(tmp_path: Path) -> None:
    """Arrays inside additionalProperties generate the expected model.

    NOTE(review): "properites" in the test name is a typo, kept because pytest
    selection (-k, CI filters) keys off the name.
    """
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "array_in_additional_properties.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "array_in_additional_properties.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_object_with_only_additional_properties(tmp_path: Path) -> None:
    """An object with only additionalProperties generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "string_dict.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "string_dict.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_nullable_object(tmp_path: Path) -> None:
    """nullable_object.json generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "nullable_object.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "nullable_object.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_object_has_one_of(tmp_path: Path) -> None:
    """An object containing oneOf generates the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "object_has_one_of.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "object_has_one_of.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_json_pointer_array(tmp_path: Path) -> None:
    """JSON-pointer array references generate the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "json_pointer_array.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "json_pointer_array.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.filterwarnings("error")
def test_main_disable_warnings_config(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
    """--disable-warnings suppresses config warnings (warnings escalate to errors here)."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "person.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--use-union-operator",
        "--target-python-version", f"3.{MIN_VERSION}",
        "--disable-warnings",
    ]
    return_code = main(args)
    captured = capsys.readouterr()
    assert return_code == Exit.OK
    assert not captured.err
+
+
@pytest.mark.filterwarnings("error")
def test_main_disable_warnings(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
    """--disable-warnings produces no stderr output (warnings escalate to errors here)."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "all_of_with_object.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--disable-warnings",
    ]
    return_code = main(args)
    captured = capsys.readouterr()
    assert return_code == Exit.OK
    assert not captured.err
+
+
@freeze_time("2019-07-26")
def test_main_jsonschema_pattern_properties_by_reference(tmp_path: Path) -> None:
    """patternProperties defined via $ref generate the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "pattern_properties_by_reference.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "pattern_properties_by_reference.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_dataclass_field(tmp_path: Path) -> None:
    """--output-model-type dataclasses.dataclass generates dataclass fields."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "user.json"),
        "--output", str(output_path),
        "--output-model-type", "dataclasses.dataclass",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "dataclass_field.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
@pytest.mark.skipif(
    black.__version__.split(".")[0] == "19",
    reason="Installed black doesn't support the old style",
)
def test_main_jsonschema_enum_root_literal(tmp_path: Path) -> None:
    """A root-level enum collapses to a Literal under the full flag combination."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "enum_in_root" / "enum_in_root.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
        "--use-schema-description",
        "--use-title-as-name",
        "--field-constraints",
        "--target-python-version", "3.9",
        "--allow-population-by-field-name",
        "--strip-default-none",
        "--use-default",
        "--enum-field-as-literal", "all",
        "--snake-case-field",
        "--collapse-root-models",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "root_in_enum.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_nullable_any_of(tmp_path: Path) -> None:
    """Nullable anyOf with --field-constraints matches the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "nullable_any_of.json"),
        "--output", str(output_path),
        "--field-constraints",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "nullable_any_of.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_nullable_any_of_use_union_operator(tmp_path: Path) -> None:
    """Nullable anyOf with --use-union-operator matches the golden file."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "nullable_any_of.json"),
        "--output", str(output_path),
        "--field-constraints",
        "--use-union-operator",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "nullable_any_of_use_union_operator.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_nested_all_of(tmp_path: Path) -> None:
    """Nested allOf schemas generate the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "nested_all_of.json"),
        "--output", str(output_path),
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "nested_all_of.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@freeze_time("2019-07-26")
def test_main_all_of_any_of(tmp_path: Path) -> None:
    """allOf combined with anyOf across a schema directory matches the golden tree."""
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "all_of_any_of"),
        "--output", str(tmp_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected_dir = EXPECTED_JSON_SCHEMA_PATH / "all_of_any_of"
    for expected in expected_dir.rglob("*.py"):
        generated = tmp_path / expected.relative_to(expected_dir)
        assert generated.read_text() == expected.read_text()
+
+
@freeze_time("2019-07-26")
def test_main_all_of_one_of(tmp_path: Path) -> None:
    """allOf combined with oneOf across a schema directory matches the golden tree."""
    return_code: Exit = main([
        "--input",
        str(JSON_SCHEMA_DATA_PATH / "all_of_one_of"),
        "--output",
        str(tmp_path),
        "--input-file-type",
        "jsonschema",
    ])
    assert return_code == Exit.OK
    # Renamed from `all_of_any_of_dir`: that name was a copy-paste leftover
    # from the anyOf test and did not match this test's fixture.
    all_of_one_of_dir = EXPECTED_JSON_SCHEMA_PATH / "all_of_one_of"
    for path in all_of_one_of_dir.rglob("*.py"):
        result = tmp_path.joinpath(path.relative_to(all_of_one_of_dir)).read_text()
        assert result == path.read_text()
+
+
@freeze_time("2019-07-26")
def test_main_null(tmp_path: Path) -> None:
    """Schemas with `null` types generate the expected model."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "null.json"),
        "--output", str(output_path),
        "--input-file-type", "jsonschema",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "null.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.skipif(
    version.parse(black.__version__) < version.parse("23.3.0"),
    reason="Require Black version 23.3.0 or later ",
)
@freeze_time("2019-07-26")
def test_main_typed_dict_special_field_name_with_inheritance_model(tmp_path: Path) -> None:
    """TypedDict output handles special field names in an inheritance hierarchy."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "special_field_name_with_inheritance_model.json"),
        "--output", str(output_path),
        "--output-model-type", "typing.TypedDict",
        "--target-python-version", "3.11",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "typed_dict_special_field_name_with_inheritance_model.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
@pytest.mark.skipif(
    version.parse(black.__version__) < version.parse("23.3.0"),
    reason="Require Black version 23.3.0 or later ",
)
@freeze_time("2019-07-26")
def test_main_typed_dict_not_required_nullable(tmp_path: Path) -> None:
    """Test main function writing to TypedDict, with combos of Optional/NotRequired."""
    output_path = tmp_path / "output.py"
    args = [
        "--input", str(JSON_SCHEMA_DATA_PATH / "not_required_nullable.json"),
        "--output", str(output_path),
        "--output-model-type", "typing.TypedDict",
        "--target-python-version", "3.11",
    ]
    assert main(args) == Exit.OK
    expected = (EXPECTED_JSON_SCHEMA_PATH / "typed_dict_not_required_nullable.py").read_text()
    assert output_path.read_text(encoding="utf-8") == expected
+
+
+@freeze_time("2019-07-26")
+def test_main_typed_dict_const(tmp_path: Path) -> None:
+    """Test main function writing to TypedDict with const fields."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "const.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        "3.10",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "typed_dict_const.py").read_text()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "24",
+    reason="Installed black doesn't support the new style",
+)
+@freeze_time("2019-07-26")
+def test_main_typed_dict_additional_properties(tmp_path: Path) -> None:
+    """Test main function writing to TypedDict with additional properties, and no other fields."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "string_dict.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        "3.11",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "typed_dict_with_only_additional_properties.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_dataclass_const(tmp_path: Path) -> None:
+    """Test main function writing to dataclass with const fields."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "const.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--target-python-version",
+        "3.10",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "dataclass_const.py").read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "discriminator_literals.py",
+        ),
+        (
+            "msgspec.Struct",
+            "discriminator_literals_msgspec.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_discriminator_literals(
+    output_model: str, expected_output: str, min_version: str, tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_literals.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        output_model,
+        "--target-python",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_discriminator_literals_with_no_mapping(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_no_mapping.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--target-python",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "discriminator_no_mapping.py").read_text()
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "discriminator_with_external_reference.py",
+        ),
+        (
+            "msgspec.Struct",
+            "discriminator_with_external_reference_msgspec.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_jsonschema_external_discriminator(
+    output_model: str, expected_output: str, min_version: str, tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_with_external_reference" / "inner_folder" / "schema.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        output_model,
+        "--target-python",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text(), (
+        EXPECTED_JSON_SCHEMA_PATH / expected_output
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "discriminator_with_external_references_folder",
+        ),
+        (
+            "msgspec.Struct",
+            "discriminator_with_external_references_folder_msgspec",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_jsonschema_external_discriminator_folder(
+    output_model: str, expected_output: str, min_version: str, tmp_path: Path
+) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_with_external_reference"),
+        "--output",
+        str(tmp_path),
+        "--output-model-type",
+        output_model,
+        "--target-python",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / expected_output
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text(), path
+
+
+@freeze_time("2019-07-26")
+def test_main_duplicate_field_constraints(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "duplicate_field_constraints"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--collapse-root-models",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "duplicate_field_constraints"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_duplicate_field_constraints_msgspec(min_version: str, tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "duplicate_field_constraints"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "msgspec.Struct",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "duplicate_field_constraints_msgspec"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_dataclass_field_defs(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "user_defs.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (
+        EXPECTED_JSON_SCHEMA_PATH / "dataclass_field.py"
+    ).read_text().replace("filename:  user.json", "filename:  user_defs.json")
+
+
+@freeze_time("2019-07-26")
+def test_main_dataclass_default(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "user_default.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "dataclass_field_default.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_all_of_ref_self(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "all_of_ref_self.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "all_of_ref_self.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_array_field_constraints(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "array_field_constraints.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--target-python-version",
+        "3.9",
+        "--field-constraints",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "array_field_constraints.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_all_of_use_default(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "all_of_default.json"),
+        "--output",
+        str(output_file),
+        "--use-default",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "all_of_use_default.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_root_one_of(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "root_one_of"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    expected_directory = EXPECTED_JSON_SCHEMA_PATH / "root_one_of"
+    for path in expected_directory.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(expected_directory)).read_text()
+        assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_one_of_with_sub_schema_array_item(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "one_of_with_sub_schema_array_item.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "one_of_with_sub_schema_array_item.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_with_custom_formatters(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    formatter_config = {
+        "license_file": str(Path(__file__).parent.parent.parent / "data/python/custom_formatters/license_example.txt")
+    }
+    formatter_config_path = tmp_path / "formatter_config"
+    formatter_config_path.write_text(json.dumps(formatter_config))
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "person.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--custom-formatters",
+        "tests.data.python.custom_formatters.add_license",
+        "--custom-formatters-kwargs",
+        str(formatter_config_path),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "custom_formatters.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_imports_correct(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "imports_correct"),
+        "--output",
+        str(tmp_path),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "imports_correct"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "duration_pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "duration_msgspec.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_jsonschema_duration(output_model: str, expected_output: str, min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "duration.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        output_model,
+        "--target-python",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_keyword_only_msgspec(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_literals.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "msgspec.Struct",
+        "--keyword-only",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "discriminator_literals_msgspec_keyword_only.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_keyword_only_msgspec_with_extra_data(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "discriminator_literals.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "msgspec.Struct",
+        "--keyword-only",
+        "--target-python-version",
+        min_version,
+        "--extra-template-data",
+        str(JSON_SCHEMA_DATA_PATH / "extra_data_msgspec.json"),
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "discriminator_literals_msgspec_keyword_only_omit_defaults.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_openapi_keyword_only_msgspec_with_extra_data(tmp_path: Path) -> None:
+    extra_data = json.loads((JSON_SCHEMA_DATA_PATH / "extra_data_msgspec.json").read_text())
+    output_file: Path = tmp_path / "output.py"
+    generate(
+        input_=JSON_SCHEMA_DATA_PATH / "discriminator_literals.json",
+        output=output_file,
+        input_file_type=InputFileType.JsonSchema,
+        output_model_type=DataModelType.MsgspecStruct,
+        keyword_only=True,
+        target_python_version=PythonVersionMin,
+        extra_template_data=defaultdict(dict, extra_data),
+        # Following values are implied by `msgspec.Struct` in the CLI
+        use_annotated=True,
+        field_constraints=True,
+    )
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "discriminator_literals_msgspec_keyword_only_omit_defaults.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_invalid_import_name(tmp_path: Path) -> None:
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "invalid_import_name"),
+        "--output",
+        str(tmp_path),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "invalid_import_name"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "field_has_same_name_v2.py",
+        ),
+        (
+            "pydantic.BaseModel",
+            "field_has_same_name.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_jsonschema_field_has_same_name(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "field_has_same_name.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_jsonschema_required_and_any_of_required(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "required_and_any_of_required.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_SCHEMA_PATH / "required_and_any_of_required.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_json_pointer_escaped_segments(tmp_path: Path) -> None:
+    # Definition keys contain characters that must be escaped in JSON Pointers.
+    schema = {
+        "definitions": {
+            "foo/bar": {"type": "object", "properties": {"value": {"type": "string"}}},
+            "baz~qux": {"type": "object", "properties": {"value": {"type": "integer"}}},
+        },
+        "properties": {
+            "foo_bar": {"$ref": "#/definitions/foo~1bar"},  # "~1" is the RFC 6901 escape for "/"
+            "baz_qux": {"$ref": "#/definitions/baz~0qux"},  # "~0" is the RFC 6901 escape for "~"
+        },
+        "type": "object",
+    }
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename: input.json\n"
+        "#   timestamp: 2019-07-26T00:00:00+00:00\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n"
+        "class FooBar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class BazQux(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class Baz0qux(BaseModel):\n    value: Optional[int] = None\n\n"  # NOTE(review): duplicate of BazQux generated from the escaped $ref — appears intentional per expected output
+        "class Foo1bar(BaseModel):\n    value: Optional[str] = None\n\n"  # NOTE(review): duplicate of FooBar generated from the escaped $ref
+        "class Model(BaseModel):\n    foo_bar: Optional[Foo1bar] = None\n    baz_qux: Optional[Baz0qux] = None\n"
+    )
+
+    input_file = tmp_path / "input.json"
+    output_file = tmp_path / "output.py"
+    input_file.write_text(json.dumps(schema))
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    result = output_file.read_text()
+    # Normalize whitespace for comparison
+    assert "".join(result.split()) == "".join(expected.split())
+
+
+@freeze_time("2019-07-26")
+def test_main_json_pointer_percent_encoded_segments(tmp_path: Path) -> None:
+    # Definition keys referenced through URI percent-encoded pointer segments.
+    schema = {
+        "definitions": {
+            "foo/bar": {"type": "object", "properties": {"value": {"type": "string"}}},
+            "baz~qux": {"type": "object", "properties": {"value": {"type": "integer"}}},
+            "space key": {"type": "object", "properties": {"value": {"type": "boolean"}}},
+        },
+        "properties": {
+            "foo_bar": {"$ref": "#/definitions/foo%2Fbar"},  # %2F percent-encodes "/"
+            "baz_qux": {"$ref": "#/definitions/baz%7Equx"},  # %7E percent-encodes "~"
+            "space_key": {"$ref": "#/definitions/space%20key"},  # %20 percent-encodes " "
+        },
+        "type": "object",
+    }
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename: input.json\n"
+        "#   timestamp: 2019-07-26T00:00:00+00:00\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n"
+        "class FooBar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class BazQux(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class SpaceKey(BaseModel):\n    value: Optional[bool] = None\n\n"
+        "class Baz7Equx(BaseModel):\n    value: Optional[int] = None\n\n"  # NOTE(review): duplicates generated from the encoded $refs — appears intentional per expected output
+        "class Foo2Fbar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class Space20key(BaseModel):\n    value: Optional[bool] = None\n\n"
+        "class Model(BaseModel):\n    foo_bar: Optional[Foo2Fbar] = None\n"
+        "    baz_qux: Optional[Baz7Equx] = None\n"
+        "    space_key: Optional[Space20key] = None\n"
+    )
+
+    input_file = tmp_path / "input.json"
+    output_file = tmp_path / "output.py"
+    input_file.write_text(json.dumps(schema))
+    return_code: Exit = main([
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    result = output_file.read_text()
+    # Normalize whitespace for comparison
+    assert "".join(result.split()) == "".join(expected.split())
+
+
+@pytest.mark.parametrize(
+    ("extra_fields", "output_model", "expected_output"),
+    [
+        (
+            "allow",
+            "pydantic.BaseModel",
+            "extra_fields_allow.py",
+        ),
+        (
+            "forbid",
+            "pydantic.BaseModel",
+            "extra_fields_forbid.py",
+        ),
+        (
+            "ignore",
+            "pydantic.BaseModel",
+            "extra_fields_ignore.py",
+        ),
+        (
+            "allow",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_allow.py",
+        ),
+        (
+            "forbid",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_forbid.py",
+        ),
+        (
+            "ignore",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_ignore.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_extra_fields(extra_fields: str, output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "extra_fields.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--extra-fields",
+        extra_fields,
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_same_name_objects(tmp_path: Path) -> None:
+    """
+    See: https://github.com/koxudaxi/datamodel-code-generator/issues/2460
+    """
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "same_name_objects.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_SCHEMA_PATH / "same_name_objects.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_jsonschema_forwarding_reference_collapse_root(tmp_path: Path) -> None:
+    """
+    See: https://github.com/koxudaxi/datamodel-code-generator/issues/1466
+    """
+    return_code: Exit = main([
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "forwarding_reference"),
+        "--output",
+        str(tmp_path),
+        "--input-file-type",
+        "jsonschema",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_JSON_SCHEMA_PATH / "forwarding_reference"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
diff -pruN 0.26.4-3/tests/main/openapi/test_main_openapi.py 0.34.0-1/tests/main/openapi/test_main_openapi.py
--- 0.26.4-3/tests/main/openapi/test_main_openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/openapi/test_main_openapi.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,2588 @@
+from __future__ import annotations
+
+import contextlib
+import json
+import platform
+import shutil
+from argparse import Namespace
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import Mock, call
+
+import black
+import isort
+import pydantic
+import pytest
+from freezegun import freeze_time
+from packaging import version
+
+with contextlib.suppress(ImportError):
+    pass
+
+from datamodel_code_generator import (
+    MIN_VERSION,
+    DataModelType,
+    InputFileType,
+    OpenAPIScope,
+    PythonVersionMin,
+    chdir,
+    generate,
+    get_version,
+    inferred_message,
+)
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH, TIMESTAMP
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+OPEN_API_DATA_PATH: Path = DATA_PATH / "openapi"
+EXPECTED_OPENAPI_PATH: Path = EXPECTED_MAIN_PATH / "openapi"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "general.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "discriminator_enum.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        "3.10",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "discriminator" / "enum.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_duplicate(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "discriminator_enum_duplicate.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        "3.10",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "discriminator" / "enum_duplicate.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_discriminator_with_properties(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "discriminator_with_properties.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "discriminator" / "with_properties.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_pydantic_basemodel(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "general.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_base_class(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    shutil.copy(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--base-class",
+        "custom_module.Base",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "base_class.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_target_python_version(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        f"3.{MIN_VERSION}",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "target_python_version.py").read_text()
+
+
+@pytest.mark.benchmark
+def test_main_modular(tmp_path: Path) -> None:
+    """Test main function on modular file."""
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main(["--input", str(input_filename), "--output", str(output_path)])
+    main_modular_dir = EXPECTED_OPENAPI_PATH / "modular"
+    for path in main_modular_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+def test_main_modular_reuse_model(tmp_path: Path) -> None:
+    """Test main function on modular file."""
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--reuse-model",
+        ])
+    main_modular_dir = EXPECTED_OPENAPI_PATH / "modular_reuse_model"
+    for path in main_modular_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+def test_main_modular_no_file() -> None:
+    """Test main function on modular file with no output name."""
+
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+
+    assert main(["--input", str(input_filename)]) == Exit.ERROR
+
+
+def test_main_modular_filename(tmp_path: Path) -> None:
+    """Test main function on modular file with filename."""
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_filename = tmp_path / "model.py"
+
+    assert main(["--input", str(input_filename), "--output", str(output_filename)]) == Exit.ERROR
+
+
+def test_main_openapi_no_file(capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test main function on non-modular file with no output name."""
+    monkeypatch.chdir(tmp_path)
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+
+    with freeze_time(TIMESTAMP):
+        main(["--input", str(input_filename)])
+
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_OPENAPI_PATH / "no_file.py").read_text()
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "extra_template_data_config.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "extra_template_data_config_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_extra_template_data_config(
+    capsys: pytest.CaptureFixture,
+    output_model: str,
+    expected_output: str,
+    tmp_path: Path,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Test main function with custom config data in extra template."""
+
+    monkeypatch.chdir(tmp_path)
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+    extra_template_data = OPEN_API_DATA_PATH / "extra_data.json"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--extra-template-data",
+            str(extra_template_data),
+            "--output-model",
+            output_model,
+        ])
+
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+def test_main_custom_template_dir_old_style(
+    capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Test main function with custom template directory."""
+
+    monkeypatch.chdir(tmp_path)
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+    custom_template_dir = DATA_PATH / "templates_old_style"
+    extra_template_data = OPEN_API_DATA_PATH / "extra_data.json"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--custom-template-dir",
+            str(custom_template_dir),
+            "--extra-template-data",
+            str(extra_template_data),
+        ])
+
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_OPENAPI_PATH / "custom_template_dir.py").read_text()
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+def test_main_openapi_custom_template_dir(
+    capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    monkeypatch.chdir(tmp_path)
+    """Test main function with custom template directory."""
+
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+    custom_template_dir = DATA_PATH / "templates"
+    extra_template_data = OPEN_API_DATA_PATH / "extra_data.json"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--custom-template-dir",
+            str(custom_template_dir),
+            "--extra-template-data",
+            str(extra_template_data),
+        ])
+
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_OPENAPI_PATH / "custom_template_dir.py").read_text()
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_pyproject(tmp_path: Path) -> None:
+    if platform.system() == "Windows":
+
+        def get_path(path: str) -> str:
+            return str(path).replace("\\", "\\\\")
+
+    else:
+
+        def get_path(path: str) -> str:
+            return str(path)
+
+    with chdir(tmp_path):
+        output_file: Path = tmp_path / "output.py"
+        pyproject_toml_path = Path(DATA_PATH) / "project" / "pyproject.toml"
+        pyproject_toml = (
+            pyproject_toml_path.read_text()
+            .replace("INPUT_PATH", get_path(OPEN_API_DATA_PATH / "api.yaml"))
+            .replace("OUTPUT_PATH", get_path(output_file))
+            .replace("ALIASES_PATH", get_path(OPEN_API_DATA_PATH / "empty_aliases.json"))
+            .replace(
+                "EXTRA_TEMPLATE_DATA_PATH",
+                get_path(OPEN_API_DATA_PATH / "empty_data.json"),
+            )
+            .replace("CUSTOM_TEMPLATE_DIR_PATH", get_path(tmp_path))
+        )
+        (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+        return_code: Exit = main([])
+        assert return_code == Exit.OK
+        assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "pyproject.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_pyproject_not_found(tmp_path: Path) -> None:
+    with chdir(tmp_path):
+        output_file: Path = tmp_path / "output.py"
+        return_code: Exit = main([
+            "--input",
+            str(OPEN_API_DATA_PATH / "api.yaml"),
+            "--output",
+            str(output_file),
+        ])
+        assert return_code == Exit.OK
+        assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "pyproject_not_found.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_stdin(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    monkeypatch.setattr("sys.stdin", (OPEN_API_DATA_PATH / "api.yaml").open())
+    return_code: Exit = main([
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "stdin.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_validation(mocker: MockerFixture, tmp_path: Path) -> None:
+    mock_prance = mocker.patch("prance.BaseParser")
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--validation",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "validation.py").read_text()
+    mock_prance.assert_called_once()
+
+
+@freeze_time("2019-07-26")
+def test_validation_failed(mocker: MockerFixture, tmp_path: Path) -> None:
+    mock_prance = mocker.patch("prance.BaseParser", side_effect=Exception("error"))
+    output_file: Path = tmp_path / "output.py"
+    assert (
+        main([
+            "--input",
+            str(OPEN_API_DATA_PATH / "invalid.yaml"),
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "openapi",
+            "--validation",
+        ])
+        == Exit.ERROR
+    )
+    mock_prance.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "args"),
+    [
+        ("pydantic.BaseModel", "with_field_constraints.py", []),
+        (
+            "pydantic.BaseModel",
+            "with_field_constraints_use_unique_items_as_set.py",
+            ["--use-unique-items-as-set"],
+        ),
+        ("pydantic_v2.BaseModel", "with_field_constraints_pydantic_v2.py", []),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_generic_container_types.py",
+            ["--use-generic-container-types"],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_generic_container_types_set.py",
+            ["--use-generic-container-types", "--use-unique-items-as-set"],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_standard_collections.py",
+            [
+                "--use-standard-collections",
+            ],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_standard_collections_set.py",
+            ["--use-standard-collections", "--use-unique-items-as-set"],
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_with_field_constraints(output_model: str, expected_output: str, args: list[str], tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_constrained.yaml"),
+        "--output",
+        str(output_file),
+        "--field-constraints",
+        "--output-model-type",
+        output_model,
+        *args,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "without_field_constraints.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "without_field_constraints_pydantic_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_without_field_constraints(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_constrained.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "with_aliases.py",
+        ),
+        (
+            "msgspec.Struct",
+            "with_aliases_msgspec.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_with_aliases(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--aliases",
+        str(OPEN_API_DATA_PATH / "aliases.json"),
+        "--target-python",
+        "3.9",
+        "--output-model",
+        output_model,
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+def test_main_with_bad_aliases(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--aliases",
+        str(OPEN_API_DATA_PATH / "not.json"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.ERROR
+
+
+def test_main_with_more_bad_aliases(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--aliases",
+        str(OPEN_API_DATA_PATH / "list.json"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.ERROR
+
+
+def test_main_with_bad_extra_data(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--extra-template-data",
+        str(OPEN_API_DATA_PATH / "not.json"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.ERROR
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_with_snake_case_field(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--snake-case-field",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "with_snake_case_field.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_with_strip_default_none(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--strip-default-none",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "with_strip_default_none.py").read_text()
+
+
+def test_disable_timestamp(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--disable-timestamp",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "disable_timestamp.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_enable_version_header(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--enable-version-header",
+    ])
+    assert return_code == Exit.OK
+    expected = (EXPECTED_OPENAPI_PATH / "enable_version_header.py").read_text()
+    expected = expected.replace("#   version:   0.0.0", f"#   version:   {get_version()}")
+    assert output_file.read_text(encoding="utf-8") == expected
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "allow_population_by_field_name.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "allow_population_by_field_name_pydantic_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_allow_population_by_field_name(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--allow-population-by-field-name",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "allow_extra_fields.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "allow_extra_fields_pydantic_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_allow_extra_fields(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--allow-extra-fields",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "enable_faux_immutability.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "enable_faux_immutability_pydantic_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_enable_faux_immutability(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--enable-faux-immutability",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_use_default(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--use-default",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "use_default.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_force_optional(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--force-optional",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "force_optional.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_with_exclusive(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "exclusive.yaml"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "with_exclusive.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_subclass_enum(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "subclass_enum.json"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "subclass_enum.py").read_text()
+
+
+def test_main_use_standard_collections(tmp_path: Path) -> None:
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--use-standard-collections",
+        ])
+    main_use_standard_collections_dir = EXPECTED_OPENAPI_PATH / "use_standard_collections"
+    for path in main_use_standard_collections_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_use_standard_collections_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_use_generic_container_types(tmp_path: Path) -> None:
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--use-generic-container-types",
+        ])
+    main_use_generic_container_types_dir = EXPECTED_OPENAPI_PATH / "use_generic_container_types"
+    for path in main_use_generic_container_types_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_use_generic_container_types_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.benchmark
+def test_main_use_generic_container_types_standard_collections(
+    tmp_path: Path,
+) -> None:
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--use-generic-container-types",
+            "--use-standard-collections",
+        ])
+    main_use_generic_container_types_standard_collections_dir = (
+        EXPECTED_OPENAPI_PATH / "use_generic_container_types_standard_collections"
+    )
+    for path in main_use_generic_container_types_standard_collections_dir.rglob("*.py"):
+        result = output_path.joinpath(
+            path.relative_to(main_use_generic_container_types_standard_collections_dir)
+        ).read_text()
+        assert result == path.read_text()
+
+
+def test_main_original_field_name_delimiter_without_snake_case_field(capsys: pytest.CaptureFixture) -> None:
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+
+    return_code: Exit = main([
+        "--input",
+        str(input_filename),
+        "--original-field-name-delimiter",
+        "-",
+    ])
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert captured.err == "`--original-field-name-delimiter` can not be used without `--snake-case-field`.\n"
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "date_type"),
+    [
+        ("pydantic.BaseModel", "datetime.py", "AwareDatetime"),
+        ("pydantic_v2.BaseModel", "datetime_pydantic_v2.py", "AwareDatetime"),
+        ("pydantic_v2.BaseModel", "datetime_pydantic_v2_datetime.py", "datetime"),
+        ("dataclasses.dataclass", "datetime_dataclass.py", "datetime"),
+        ("msgspec.Struct", "datetime_msgspec.py", "datetime"),
+    ],
+)
+def test_main_openapi_aware_datetime(output_model: str, expected_output: str, date_type: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "datetime.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-datetime-class",
+        date_type,
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "datetime.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "datetime_pydantic_v2.py",
+        ),
+    ],
+)
+def test_main_openapi_datetime(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "datetime.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_models_not_found(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "no_components.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert captured.err == "Models not found in the input data\n"
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_enum_models_as_literal_one(min_version: str, tmp_path: Path) -> None:  # --enum-field-as-literal=one: single-member enums become Literal.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "enum_models.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--enum-field-as-literal",
+        "one",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "enum_models" / "one.py").read_text()
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_use_one_literal_as_default(min_version: str, tmp_path: Path) -> None:  # --use-one-literal-as-default: the sole Literal value becomes the field default.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "enum_models.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--enum-field-as-literal",
+        "one",
+        "--target-python-version",
+        min_version,
+        "--use-one-literal-as-default",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "enum_models" / "one_literal_as_default.py").read_text()
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_enum_models_as_literal_all(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "enum_models.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--enum-field-as-literal",
+        "all",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "enum_models" / "all.py").read_text()
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_enum_models_as_literal(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "enum_models.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--enum-field-as-literal",
+        "all",
+        "--target-python-version",
+        f"3.{MIN_VERSION}",
+    ])
+
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "enum_models" / "as_literal.py").read_text()
+    )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_openapi_all_of_required(tmp_path: Path) -> None:  # required fields declared across allOf members are merged.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "allof_required.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "allof_required.py").read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_openapi_nullable(tmp_path: Path) -> None:  # nullable schema fields with default (non-strict) nullability handling.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "nullable.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_nullable_strict_nullable(tmp_path: Path) -> None:  # --strict-nullable: only explicitly nullable fields become Optional.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--strict-nullable",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "nullable_strict_nullable.py").read_text()
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "general.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_pattern.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_pattern(output_model: str, expected_output: str, tmp_path: Path) -> None:  # regex `pattern` constraints rendered per output model type.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "pattern.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--target-python",
+        "3.9",
+        "--output-model-type",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (
+        EXPECTED_OPENAPI_PATH / "pattern" / expected_output
+    ).read_text().replace("pattern.json", "pattern.yaml")  # golden files reference the .json input; rewrite to the .yaml actually used here
+
+
+@pytest.mark.parametrize(
+    ("expected_output", "args"),
+    [
+        ("pattern_with_lookaround_pydantic_v2.py", []),
+        (
+            "pattern_with_lookaround_pydantic_v2_field_constraints.py",
+            ["--field-constraints"],
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "22",
+    reason="Installed black doesn't support Python version 3.10",
+)
+def test_main_openapi_pattern_with_lookaround_pydantic_v2(
+    expected_output: str, args: list[str], tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "pattern_lookaround.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--target-python",
+        "3.9",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        *args,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_generate_custom_class_name_generator_modular(  # generate() API with a custom_class_name_generator over a modular (multi-file) spec.
+    tmp_path: Path,
+) -> None:
+    output_path = tmp_path / "model"
+    main_modular_custom_class_name_dir = EXPECTED_OPENAPI_PATH / "modular_custom_class_name"
+
+    def custom_class_name_generator(name: str) -> str:
+        return f"Custom{name[0].upper() + name[1:]}"
+
+    with freeze_time(TIMESTAMP):  # NOTE(review): overrides the outer @freeze_time while active
+        input_ = (OPEN_API_DATA_PATH / "modular.yaml").relative_to(Path.cwd())
+        assert not input_.is_absolute()
+        generate(
+            input_=input_,
+            input_file_type=InputFileType.OpenAPI,
+            output=output_path,
+            custom_class_name_generator=custom_class_name_generator,
+        )
+
+        for path in main_modular_custom_class_name_dir.rglob("*.py"):
+            result = output_path.joinpath(path.relative_to(main_modular_custom_class_name_dir)).read_text()
+            assert result == path.read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_http_openapi(mocker: MockerFixture, tmp_path: Path) -> None:  # --url input; remote $refs fetched via mocked httpx.get with expected kwargs.
+    def get_mock_response(path: str) -> Mock:
+        mock = mocker.Mock()
+        mock.text = (OPEN_API_DATA_PATH / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("refs.yaml"),
+            get_mock_response("definitions.yaml"),
+        ],
+    )
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--url",
+        "https://example.com/refs.yaml",
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "http_refs.py").read_text()
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/refs.yaml",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://teamdigitale.github.io/openapi/0.0.6/definitions.yaml",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+def test_main_disable_appending_item_suffix(tmp_path: Path) -> None:  # --disable-appending-item-suffix: array item models keep their base name.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_constrained.yaml"),
+        "--output",
+        str(output_file),
+        "--field-constraints",
+        "--disable-appending-item-suffix",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "disable_appending_item_suffix.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_body_and_parameters(tmp_path: Path) -> None:  # --openapi-scopes paths+schemas: models generated for both scopes.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "body_and_parameters.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "body_and_parameters" / "general.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_body_and_parameters_remote_ref(mocker: MockerFixture, tmp_path: Path) -> None:  # remote $ref in paths scope resolved via mocked httpx.get.
+    input_path = OPEN_API_DATA_PATH / "body_and_parameters_remote_ref.yaml"
+    person_response = mocker.Mock()
+    person_response.text = input_path.read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(input_path),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "body_and_parameters" / "remote_ref.py").read_text()
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://schema.example",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_body_and_parameters_only_paths(tmp_path: Path) -> None:  # scopes restricted to paths only.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "body_and_parameters.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--openapi-scopes",
+        "paths",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "body_and_parameters" / "only_paths.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_body_and_parameters_only_schemas(tmp_path: Path) -> None:  # scopes restricted to schemas only.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "body_and_parameters.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--openapi-scopes",
+        "schemas",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "body_and_parameters" / "only_schemas.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_content_in_parameters(tmp_path: Path) -> None:  # parameters using `content` media-type objects.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "content_in_parameters.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "content_in_parameters.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_oas_response_reference(tmp_path: Path) -> None:  # $ref to a reusable response object under paths scope.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "oas_response_reference.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "oas_response_reference.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_json_pointer(tmp_path: Path) -> None:  # $refs expressed as JSON pointers.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "json_pointer.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "json_pointer.py").read_text()
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        ("pydantic.BaseModel", "use_annotated_with_field_constraints.py"),
+        (
+            "pydantic_v2.BaseModel",
+            "use_annotated_with_field_constraints_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_use_annotated_with_field_constraints(  # --use-annotated + --field-constraints: constraints rendered via Annotated[...].
+    output_model: str, expected_output: str, min_version: str, tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_constrained.yaml"),
+        "--output",
+        str(output_file),
+        "--field-constraints",
+        "--use-annotated",
+        "--target-python-version",
+        min_version,
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_nested_enum(tmp_path: Path) -> None:  # enums nested inside object schemas.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nested_enum.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "nested_enum.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_openapi_special_yaml_keywords(mocker: MockerFixture, tmp_path: Path) -> None:  # YAML keywords (on/off/yes/no) as keys; prance validator mocked out.
+    mock_prance = mocker.patch("prance.BaseParser")
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "special_yaml_keywords.yaml"),
+        "--output",
+        str(output_file),
+        "--validation",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "special_yaml_keywords.py").read_text()
+    mock_prance.assert_called_once()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "22",
+    reason="Installed black doesn't support Python version 3.10",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_nullable_use_union_operator(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--use-union-operator",
+        "--strict-nullable",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "nullable_strict_nullable_use_union_operator.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_external_relative_ref(tmp_path: Path) -> None:  # relative $refs that climb out of the input directory; whole output tree compared.
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "external_relative_ref" / "model_b"),
+        "--output",
+        str(tmp_path),
+    ])
+    assert return_code == Exit.OK
+    main_modular_dir = EXPECTED_OPENAPI_PATH / "external_relative_ref"
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_collapse_root_models(tmp_path: Path) -> None:  # --collapse-root-models: root-model indirections inlined.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "not_real_string.json"),
+        "--output",
+        str(output_file),
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "collapse_root_models.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_collapse_root_models_field_constraints(tmp_path: Path) -> None:  # collapse combined with --field-constraints.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "not_real_string.json"),
+        "--output",
+        str(output_file),
+        "--collapse-root-models",
+        "--field-constraints",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "collapse_root_models_field_constraints.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_collapse_root_models_with_references_to_flat_types(tmp_path: Path) -> None:  # collapse when root models reference scalar (flat) types.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "flat_type.jsonschema"),
+        "--output",
+        str(output_file),
+        "--collapse-root-models",
+    ])
+
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "collapse_root_models_with_references_to_flat_types.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_max_items_enum(tmp_path: Path) -> None:  # maxItems on an array of enum values.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "max_items_enum.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "max_items_enum.py").read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "const.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "const_pydantic_v2.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_const(output_model: str, expected_output: str, tmp_path: Path) -> None:  # JSON Schema `const` keyword per output model.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "const.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model",
+        output_model,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "const_field.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "const_field_pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "const_field_msgspec.py",
+        ),
+        (
+            "typing.TypedDict",
+            "const_field_typed_dict.py",
+        ),
+        (
+            "dataclasses.dataclass",
+            "const_field_dataclass.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_const_field(output_model: str, expected_output: str, tmp_path: Path) -> None:  # `const` fields with collapsed root models, across all five output model types.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "const.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model",
+        output_model,
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / expected_output).read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_complex_reference(tmp_path: Path) -> None:  # deeply chained/circular $ref structures.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "complex_reference.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "complex_reference.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_reference_to_object_properties(tmp_path: Path) -> None:  # $ref pointing directly at another object's property.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "reference_to_object_properties.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "reference_to_object_properties.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_reference_to_object_properties_collapse_root_models(tmp_path: Path) -> None:  # same input, with root models collapsed.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "reference_to_object_properties.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "reference_to_object_properties_collapse_root_models.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_override_required_all_of_field(tmp_path: Path) -> None:  # a subclass's allOf may re-declare a parent field as required.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "override_required_all_of.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "override_required_all_of.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_use_default_kwarg(tmp_path: Path) -> None:  # --use-default-kwarg: Field(default=...) instead of positional default.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--use-default-kwarg",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "use_default_kwarg.py").read_text()
+
+
+@pytest.mark.parametrize(
+    ("input_", "output"),
+    [
+        (
+            "discriminator.yaml",
+            "general.py",
+        ),
+        (
+            "discriminator_without_mapping.yaml",
+            "without_mapping.py",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_discriminator(input_: str, output: str, tmp_path: Path) -> None:  # discriminated unions, with and without an explicit mapping.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / input_),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "discriminator" / output).read_text()
+
+
+@freeze_time("2023-07-27")
+@pytest.mark.parametrize(
+    ("kind", "option", "expected"),
+    [
+        (
+            "anyOf",
+            "--collapse-root-models",
+            "in_array_collapse_root_models.py",
+        ),
+        (
+            "oneOf",
+            "--collapse-root-models",
+            "in_array_collapse_root_models.py",
+        ),
+        ("anyOf", None, "in_array.py"),
+        ("oneOf", None, "in_array.py"),
+    ],
+)
+def test_main_openapi_discriminator_in_array(kind: str, option: str | None, expected: str, tmp_path: Path) -> None:  # discriminators inside anyOf/oneOf array items.
+    output_file: Path = tmp_path / "output.py"
+    input_file = f"discriminator_in_array_{kind.lower()}.yaml"
+    return_code: Exit = main([
+        a
+        for a in [
+            "--input",
+            str(OPEN_API_DATA_PATH / input_file),
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "openapi",
+            option,
+        ]
+        if a
+    ])  # the comprehension drops the None placeholder when no extra option is parametrized
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (
+        EXPECTED_OPENAPI_PATH / "discriminator" / expected
+    ).read_text().replace("discriminator_in_array.yaml", input_file)
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "default_object",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2_default_object",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_default_object",
+        ),
+    ],
+)
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_default_object(output_model: str, expected_output: str, tmp_path: Path) -> None:  # object-valued defaults; modular output tree compared per model type.
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "default_object.yaml"),
+        "--output",
+        str(tmp_path),
+        "--output-model",
+        output_model,
+        "--input-file-type",
+        "openapi",
+        "--target-python-version",
+        "3.9",
+    ])
+    assert return_code == Exit.OK
+
+    main_modular_dir = EXPECTED_OPENAPI_PATH / expected_output
+    for path in main_modular_dir.rglob("*.py"):
+        result = tmp_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text(), path
+
+
+@freeze_time("2019-07-26")
+def test_main_dataclass(tmp_path: Path) -> None:  # dataclasses.dataclass output model type.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "dataclass.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_dataclass_base_class(tmp_path: Path) -> None:  # dataclass output with a custom --base-class.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--base-class",
+        "custom_base.Base",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "dataclass_base_class.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_reference_same_hierarchy_directory(tmp_path: Path) -> None:  # relative refs resolved from a changed working directory.
+    with chdir(OPEN_API_DATA_PATH / "reference_same_hierarchy_directory"):
+        output_file: Path = tmp_path / "output.py"
+        return_code: Exit = main([
+            "--input",
+            "./public/entities.yaml",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "openapi",
+        ])
+        assert return_code == Exit.OK
+        assert (
+            output_file.read_text(encoding="utf-8")
+            == (EXPECTED_OPENAPI_PATH / "reference_same_hierarchy_directory.py").read_text()
+        )
+
+
+@freeze_time("2019-07-26")
+def test_main_multiple_required_any_of(tmp_path: Path) -> None:  # anyOf members each carrying their own required lists.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "multiple_required_any_of.yaml"),
+        "--output",
+        str(output_file),
+        "--collapse-root-models",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "multiple_required_any_of.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_max_min(tmp_path: Path) -> None:  # numeric maximum/minimum constraints.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "max_min_number.yaml"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "max_min_number.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_use_operation_id_as_name(tmp_path: Path) -> None:  # operationId drives generated model names.
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--use-operation-id-as-name",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+        "parameters",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "use_operation_id_as_name.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_use_operation_id_as_name_not_found_operation_id(  # missing operationId must fail with an exact stderr message.
+    capsys: pytest.CaptureFixture, tmp_path: Path
+) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "body_and_parameters.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--use-operation-id-as-name",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+        "parameters",
+    ])
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert (
+        captured.err == "All operations must have an operationId when --use_operation_id_as_name is set."
+        "The following path was missing an operationId: pets\n"
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_unsorted_optional_fields(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "unsorted_optional_fields.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "dataclasses.dataclass",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "unsorted_optional_fields.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_typed_dict(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "typed_dict.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_typed_dict_py(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "typed_dict_py.py").read_text(encoding="utf-8")
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_modular_typed_dict(tmp_path: Path) -> None:
+    """Test main function on modular file."""
+
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--output",
+            str(output_path),
+            "--output-model-type",
+            "typing.TypedDict",
+            "--target-python-version",
+            "3.11",
+        ])
+    main_modular_dir = EXPECTED_OPENAPI_PATH / "modular_typed_dict"
+    for path in main_modular_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text(encoding="utf-8")
+        assert result == path.read_text(encoding="utf-8")
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+@freeze_time("2019-07-26")
+def test_main_typed_dict_nullable(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        "3.11",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "typed_dict_nullable.py").read_text(encoding="utf-8")
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+@freeze_time("2019-07-26")
+def test_main_typed_dict_nullable_strict_nullable(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        "3.11",
+        "--strict-nullable",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "typed_dict_nullable_strict_nullable.py").read_text(encoding="utf-8")
+    )
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_openapi_nullable_31(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "nullable_31.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--strip-default-none",
+        "--use-union-operator",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "nullable_31.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_custom_file_header_path(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--custom-file-header-path",
+        str(DATA_PATH / "custom_file_header.txt"),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "custom_file_header.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_custom_file_header_duplicate_options(capsys: pytest.CaptureFixture, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--custom-file-header-path",
+        str(DATA_PATH / "custom_file_header.txt"),
+        "--custom-file-header",
+        "abc",
+    ])
+
+    captured = capsys.readouterr()
+    assert return_code == Exit.ERROR
+    assert captured.err == "`--custom_file_header_path` can not be used with `--custom_file_header`.\n"
+
+
+@freeze_time("2019-07-26")
+def test_main_pydantic_v2(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "pydantic_v2.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_custom_id_pydantic_v2(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "custom_id.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "custom_id_pydantic_v2.py").read_text(encoding="utf-8")
+
+
+@pytest.mark.skipif(
+    not isort.__version__.startswith("4."),
+    reason="isort 5.x don't sort pydantic modules",
+)
+@freeze_time("2019-07-26")
+def test_main_openapi_custom_id_pydantic_v2_custom_base(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "custom_id.yaml"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--base-class",
+        "custom_base.Base",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "custom_id_pydantic_v2_custom_base.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_all_of_with_relative_ref(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "all_of_with_relative_ref" / "openapi.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--keep-model-order",
+        "--collapse-root-models",
+        "--field-constraints",
+        "--use-title-as-name",
+        "--field-include-all-keys",
+        "--use-field-description",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "all_of_with_relative_ref.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_msgspec_struct(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        min_version,
+        "--output-model-type",
+        "msgspec.Struct",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "msgspec_struct.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_msgspec_struct_snake_case(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_ordered_required_fields.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        min_version,
+        "--snake-case-field",
+        "--output-model-type",
+        "msgspec.Struct",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "msgspec_struct_snake_case.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_msgspec_use_annotated_with_field_constraints(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_constrained.yaml"),
+        "--output",
+        str(output_file),
+        "--field-constraints",
+        "--target-python-version",
+        "3.9",
+        "--output-model-type",
+        "msgspec.Struct",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "msgspec_use_annotated_with_field_constraints.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_discriminator_one_literal_as_default(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "discriminator_enum.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-one-literal-as-default",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "discriminator" / "enum_one_literal_as_default.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_discriminator_one_literal_as_default_dataclass(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "discriminator_enum.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--use-one-literal-as-default",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "discriminator" / "dataclass_enum_one_literal_as_default.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_dataclass(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "inheritance.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--keyword-only",
+        "--target-python-version",
+        "3.10",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "dataclass_keyword_only.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_keyword_only_dataclass_with_python_3_9(capsys: pytest.CaptureFixture) -> None:
+    return_code = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "inheritance.yaml"),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--keyword-only",
+        "--target-python-version",
+        "3.9",
+    ])
+    assert return_code == Exit.ERROR
+    captured = capsys.readouterr()
+    assert not captured.out
+    assert captured.err == "`--keyword-only` requires `--target-python-version` 3.10 or higher.\n"
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_dataclass_with_naive_datetime(capsys: pytest.CaptureFixture) -> None:
+    return_code = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "inheritance.yaml"),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--output-datetime-class",
+        "NaiveDatetime",
+    ])
+    assert return_code == Exit.ERROR
+    captured = capsys.readouterr()
+    assert not captured.out
+    assert (
+        captured.err
+        == '`--output-datetime-class` only allows "datetime" for `--output-model-type` dataclasses.dataclass\n'
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_msgspec(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "inheritance.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "msgspec.Struct",
+        "--keyword-only",
+        "--target-python-version",
+        min_version,
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "msgspec_keyword_only.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_msgspec_with_extra_data(min_version: str, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "inheritance.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "msgspec.Struct",
+        "--keyword-only",
+        "--target-python-version",
+        min_version,
+        "--extra-template-data",
+        str(OPEN_API_DATA_PATH / "extra_data_msgspec.json"),
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "msgspec_keyword_only_omit_defaults.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_generate_openapi_keyword_only_msgspec_with_extra_data(tmp_path: Path) -> None:
+    extra_data = json.loads((OPEN_API_DATA_PATH / "extra_data_msgspec.json").read_text(encoding="utf-8"))
+    output_file: Path = tmp_path / "output.py"
+    generate(
+        input_=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output=output_file,
+        input_file_type=InputFileType.OpenAPI,
+        output_model_type=DataModelType.MsgspecStruct,
+        keyword_only=True,
+        target_python_version=PythonVersionMin,
+        extra_template_data=defaultdict(dict, extra_data),
+        # Following values are defaults in the CLI, but not in the API
+        openapi_scopes=[OpenAPIScope.Schemas],
+        # Following values are implied by `msgspec.Struct` in the CLI
+        use_annotated=True,
+        field_constraints=True,
+    )
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_OPENAPI_PATH / "msgspec_keyword_only_omit_defaults.py").read_text(encoding="utf-8")
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_referenced_default(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "referenced_default.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "referenced_default.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_duplicate_models(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "duplicate_models2.yaml"),
+        "--output",
+        str(output_file),
+        "--use-operation-id-as-name",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+        "parameters",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--parent-scoped-naming",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "duplicate_models2.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_shadowed_imports(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "shadowed_imports.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "shadowed_imports.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_extra_fields_forbid(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "additional_properties.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+        "--extra-fields",
+        "forbid",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "additional_properties.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_main_openapi_same_name_objects(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "same_name_objects.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "openapi",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_OPENAPI_PATH / "same_name_objects.py").read_text(encoding="utf-8")
diff -pruN 0.26.4-3/tests/main/test_main_csv.py 0.34.0-1/tests/main/test_main_csv.py
--- 0.26.4-3/tests/main/test_main_csv.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/test_main_csv.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from typing import TYPE_CHECKING
+
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+CSV_DATA_PATH: Path = DATA_PATH / "csv"
+EXPECTED_CSV_PATH: Path = EXPECTED_MAIN_PATH / "csv"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@freeze_time("2019-07-26")
+def test_csv_file(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(CSV_DATA_PATH / "simple.csv"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "csv",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_CSV_PATH / "csv_file_simple.py").read_text(encoding="utf-8")
+
+
+@freeze_time("2019-07-26")
+def test_csv_stdin(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    monkeypatch.setattr("sys.stdin", (CSV_DATA_PATH / "simple.csv").open(encoding="utf-8"))
+    return_code: Exit = main([
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "csv",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_CSV_PATH / "csv_stdin_simple.py").read_text(encoding="utf-8")
diff -pruN 0.26.4-3/tests/main/test_main_general.py 0.34.0-1/tests/main/test_main_general.py
--- 0.26.4-3/tests/main/test_main_general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/test_main_general.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,267 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator import (
+    DataModelType,
+    InputFileType,
+    generate,
+    snooper_to_methods,
+)
+from datamodel_code_generator.__main__ import Config, Exit, main
+from datamodel_code_generator.format import PythonVersion
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+DATA_PATH: Path = Path(__file__).parent.parent / "data"
+PYTHON_DATA_PATH: Path = DATA_PATH / "python"
+EXPECTED_MAIN_PATH = DATA_PATH / "expected" / "main"
+
+TIMESTAMP = "1985-10-26T01:21:00-07:00"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+def test_debug(mocker: MockerFixture) -> None:
+    with pytest.raises(expected_exception=SystemExit):
+        main(["--debug", "--help"])
+
+    mocker.patch("datamodel_code_generator.pysnooper", None)
+    with pytest.raises(expected_exception=SystemExit):
+        main(["--debug", "--help"])
+
+
+@freeze_time("2019-07-26")
+def test_snooper_to_methods_without_pysnooper(mocker: MockerFixture) -> None:
+    mocker.patch("datamodel_code_generator.pysnooper", None)
+    mock = mocker.Mock()
+    assert snooper_to_methods()(mock) == mock
+
+
+@pytest.mark.parametrize(argnames="no_color", argvalues=[False, True])
+def test_show_help(no_color: bool, capsys: pytest.CaptureFixture[str]) -> None:
+    args = ["--no-color"] if no_color else []
+    args += ["--help"]
+
+    with pytest.raises(expected_exception=SystemExit) as context:
+        main(args)
+    assert context.value.code == Exit.OK
+
+    output = capsys.readouterr().out
+    assert ("\x1b" not in output) == no_color
+
+
+def test_show_help_when_no_input(mocker: MockerFixture) -> None:
+    print_help_mock = mocker.patch("datamodel_code_generator.__main__.arg_parser.print_help")
+    isatty_mock = mocker.patch("sys.stdin.isatty", return_value=True)
+    return_code: Exit = main([])
+    assert return_code == Exit.ERROR
+    assert isatty_mock.called
+    assert print_help_mock.called
+
+
+def test_no_args_has_default(monkeypatch: pytest.MonkeyPatch) -> None:
+    """
+    No argument should have a default value set because it would override pyproject.toml values.
+
+    Default values are set in __main__.Config class.
+    """
+    namespace = Namespace()
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace)
+    main([])
+    for field in Config.get_fields():
+        assert getattr(namespace, field, None) is None
+
+
+@freeze_time("2019-07-26")
+def test_space_and_special_characters_dict(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(PYTHON_DATA_PATH / "space_and_special_characters_dict.py"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "dict",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_MAIN_PATH / "space_and_special_characters_dict.py").read_text()
+    )
+
+
+@freeze_time("2024-12-14")
+def test_direct_input_dict(tmp_path: Path) -> None:
+    output_file = tmp_path / "output.py"
+    generate(
+        {"foo": 1, "bar": {"baz": 2}},
+        input_file_type=InputFileType.Dict,
+        output=output_file,
+        output_model_type=DataModelType.PydanticV2BaseModel,
+        snake_case_field=True,
+    )
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_PATH / "direct_input_dict.py").read_text(encoding="utf-8")
+
+
+@freeze_time(TIMESTAMP)
+def test_frozen_dataclasses(tmp_path: Path) -> None:
+    """Test --frozen-dataclasses flag functionality."""
+    output_file = tmp_path / "output.py"
+    generate(
+        DATA_PATH / "jsonschema" / "simple_frozen_test.json",
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.DataclassesDataclass,
+        frozen_dataclasses=True,
+    )
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_PATH / "frozen_dataclasses.py").read_text(encoding="utf-8")
+
+
+@freeze_time(TIMESTAMP)
+def test_frozen_dataclasses_with_keyword_only(tmp_path: Path) -> None:
+    """Test --frozen-dataclasses with --keyword-only flag combination."""
+
+    output_file = tmp_path / "output.py"
+    generate(
+        DATA_PATH / "jsonschema" / "simple_frozen_test.json",
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.DataclassesDataclass,
+        frozen_dataclasses=True,
+        keyword_only=True,
+        target_python_version=PythonVersion.PY_310,
+    )
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_PATH / "frozen_dataclasses_keyword_only.py").read_text(encoding="utf-8")
+
+
+@freeze_time(TIMESTAMP)
+def test_frozen_dataclasses_command_line(tmp_path: Path) -> None:
+    """Test --frozen-dataclasses flag via command line."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(DATA_PATH / "jsonschema" / "simple_frozen_test.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--frozen-dataclasses",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_PATH / "frozen_dataclasses.py").read_text(encoding="utf-8")
+
+
+@freeze_time(TIMESTAMP)
+def test_frozen_dataclasses_with_keyword_only_command_line(tmp_path: Path) -> None:
+    """Test --frozen-dataclasses with --keyword-only flag via command line."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(DATA_PATH / "jsonschema" / "simple_frozen_test.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--frozen-dataclasses",
+        "--keyword-only",
+        "--target-python-version",
+        "3.10",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_MAIN_PATH / "frozen_dataclasses_keyword_only.py").read_text(encoding="utf-8")
+    )
+
+
+def test_filename_with_newline_injection(tmp_path: Path) -> None:
+    """Test that filenames with newlines cannot inject code into generated files"""
+
+    schema_content = """{"type": "object", "properties": {"name": {"type": "string"}}}"""
+
+    malicious_filename = """schema.json
+# INJECTED CODE:
+import os
+os.system('echo INJECTED')
+# END INJECTION"""
+
+    output_path = tmp_path / "output.py"
+
+    generate(
+        input_=schema_content,
+        input_filename=malicious_filename,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_path,
+    )
+
+    generated_content = output_path.read_text(encoding="utf-8")
+
+    assert "#   filename:  schema.json # INJECTED CODE: import os" in generated_content, (
+        "Filename not properly sanitized"
+    )
+
+    assert not any(
+        line.strip().startswith("import os") and not line.strip().startswith("#")
+        for line in generated_content.split("\n")
+    )
+    assert not any("os.system" in line and not line.strip().startswith("#") for line in generated_content.split("\n"))
+
+    compile(generated_content, str(output_path), "exec")
+
+
+def test_filename_with_various_control_characters(tmp_path: Path) -> None:
+    """Test that various control characters in filenames are properly sanitized"""
+
+    schema_content = """{"type": "object", "properties": {"test": {"type": "string"}}}"""
+
+    test_cases = [
+        ("newline", "schema.json\nimport os; os.system('echo INJECTED')"),
+        ("carriage_return", "schema.json\rimport os; os.system('echo INJECTED')"),
+        ("crlf", "schema.json\r\nimport os; os.system('echo INJECTED')"),
+        ("tab_newline", "schema.json\t\nimport os; os.system('echo TAB')"),
+        ("form_feed", "schema.json\f\nimport os; os.system('echo FF')"),
+        ("vertical_tab", "schema.json\v\nimport os; os.system('echo VT')"),
+        ("unicode_line_separator", "schema.json\u2028import os; os.system('echo U2028')"),
+        ("unicode_paragraph_separator", "schema.json\u2029import os; os.system('echo U2029')"),
+        ("multiple_newlines", "schema.json\n\n\nimport os; os.system('echo MULTI')"),
+        ("mixed_characters", "schema.json\n\r\t\nimport os; os.system('echo MIXED')"),
+    ]
+
+    for test_name, malicious_filename in test_cases:
+        output_path = tmp_path / "output.py"
+
+        generate(
+            input_=schema_content,
+            input_filename=malicious_filename,
+            input_file_type=InputFileType.JsonSchema,
+            output=output_path,
+        )
+
+        generated_content = output_path.read_text(encoding="utf-8")
+
+        assert not any(
+            line.strip().startswith("import ") and not line.strip().startswith("#")
+            for line in generated_content.split("\n")
+        ), f"Injection found for {test_name}"
+
+        assert not any(
+            "os.system" in line and not line.strip().startswith("#") for line in generated_content.split("\n")
+        ), f"System call found for {test_name}"
+
+        compile(generated_content, str(output_path), "exec")
diff -pruN 0.26.4-3/tests/main/test_main_json.py 0.34.0-1/tests/main/test_main_json.py
--- 0.26.4-3/tests/main/test_main_json.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/test_main_json.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,227 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from typing import TYPE_CHECKING
+from unittest.mock import call
+
+import black
+import pytest
+from freezegun import freeze_time
+from packaging import version
+
+from datamodel_code_generator import (
+    chdir,
+)
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from pytest_mock import MockerFixture
+
+FixtureRequest = pytest.FixtureRequest
+
+
+JSON_DATA_PATH: Path = DATA_PATH / "json"
+EXPECTED_JSON_PATH: Path = EXPECTED_MAIN_PATH / "json"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@freeze_time("2019-07-26")
+def test_main_json(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "pet.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "general.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_space_and_special_characters_json(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "space_and_special_characters.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "space_and_special_characters.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_json_failed(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "broken.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+    ])
+    assert return_code == Exit.ERROR
+
+
+@freeze_time("2019-07-26")
+def test_main_json_array_include_null(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "array_include_null.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "json_array_include_null.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_json_reuse_model(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "duplicate_models.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+        "--reuse-model",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "json_reuse_model.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_json_reuse_model_pydantic2(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "duplicate_models.json"),
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+        "--reuse-model",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "json_reuse_model_pydantic2.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_simple_json_snake_case_field(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    with chdir(JSON_DATA_PATH / "simple.json"):
+        return_code: Exit = main([
+            "--input",
+            "simple.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "json",
+            "--snake-case-field",
+        ])
+        assert return_code == Exit.OK
+        assert (
+            output_file.read_text(encoding="utf-8")
+            == (EXPECTED_JSON_PATH / "simple_json_snake_case_field.py").read_text()
+        )
+
+
+@freeze_time("2019-07-26")
+def test_main_http_json(mocker: MockerFixture, tmp_path: Path) -> None:
+    def get_mock_response(path: str) -> mocker.Mock:
+        mock = mocker.Mock()
+        mock.text = (JSON_DATA_PATH / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("pet.json"),
+        ],
+    )
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--url",
+        "https://example.com/pet.json",
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "general.py").read_text().replace(
+        "#   filename:  pet.json",
+        "#   filename:  https://example.com/pet.json",
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/pet.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+@freeze_time("2019-07-26")
+def test_main_typed_dict_space_and_special_characters(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "space_and_special_characters.json"),
+        "--output",
+        str(output_file),
+        "--output-model-type",
+        "typing.TypedDict",
+        "--target-python-version",
+        "3.11",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_JSON_PATH / "typed_dict_space_and_special_characters.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_json_snake_case_field(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(JSON_DATA_PATH / "snake_case.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "json",
+        "--snake-case-field",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_JSON_PATH / "json_snake_case_field.py").read_text()
diff -pruN 0.26.4-3/tests/main/test_main_yaml.py 0.34.0-1/tests/main/test_main_yaml.py
--- 0.26.4-3/tests/main/test_main_yaml.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/test_main_yaml.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from argparse import Namespace
+from typing import TYPE_CHECKING
+
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH, EXPECTED_MAIN_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+YAML_DATA_PATH: Path = DATA_PATH / "yaml"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.mark.benchmark
+@freeze_time("2019-07-26")
+def test_main_yaml(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(YAML_DATA_PATH / "pet.yaml"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "yaml",
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_PATH / "yaml.py").read_text()
diff -pruN 0.26.4-3/tests/main/test_types.py 0.34.0-1/tests/main/test_types.py
--- 0.26.4-3/tests/main/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/main/test_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+from datamodel_code_generator.format import PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_LITERAL,
+    IMPORT_OPTIONAL,
+)
+from datamodel_code_generator.types import DataType
+
+
+def test_imports_with_literal_one() -> None:
+    """Test imports for a DataType with single literal value"""
+    data_type = DataType(literals=[""], python_version=PythonVersionMin)
+
+    # Convert iterator to list for assertion
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert len(imports) == 1
+
+
+def test_imports_with_literal_one_and_optional() -> None:
+    """Test imports for an optional DataType with single literal value"""
+    data_type = DataType(literals=[""], is_optional=True, python_version=PythonVersionMin)
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert IMPORT_OPTIONAL in imports
+    assert len(imports) == 2
+
+
+def test_imports_with_literal_empty() -> None:
+    """Test imports for a DataType with no literal values"""
+    data_type = DataType(literals=[], python_version=PythonVersionMin)
+
+    imports = list(data_type.imports)
+    assert len(imports) == 0
+
+
+def test_imports_with_nested_dict_key() -> None:
+    """Test imports for a DataType with dict_key containing literals"""
+    dict_key_type = DataType(literals=["key"], python_version=PythonVersionMin)
+
+    data_type = DataType(python_version=PythonVersionMin, dict_key=dict_key_type)
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert len(imports) == 1
+
+
+def test_imports_without_duplicate_literals() -> None:
+    """Test that literal import is not duplicated"""
+    dict_key_type = DataType(literals=["key1"], python_version=PythonVersionMin)
+
+    data_type = DataType(
+        literals=["key2"],
+        python_version=PythonVersionMin,
+        dict_key=dict_key_type,
+    )
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
diff -pruN 0.26.4-3/tests/model/dataclass/test_param.py 0.34.0-1/tests/model/dataclass/test_param.py
--- 0.26.4-3/tests/model/dataclass/test_param.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/dataclass/test_param.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,147 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model.dataclass import DataClass, DataModelField
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_dataclass_without_frozen() -> None:
+    """Test dataclass generation without frozen parameter."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type=Types.string),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=False,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_frozen() -> None:
+    """Test dataclass generation with frozen=True."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type=Types.string),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(frozen=True)" in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_keyword_only_and_frozen() -> None:
+    """Test dataclass generation with both keyword_only and frozen parameters."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type=Types.string),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        keyword_only=True,
+        frozen=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "kw_only=True" in rendered
+    assert "frozen=True" in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_only_keyword_only() -> None:
+    """Test dataclass generation with only keyword_only parameter."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type=Types.string),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        keyword_only=True,
+        frozen=False,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "kw_only=True" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_frozen_attribute() -> None:
+    """Test that frozen attribute is properly stored."""
+    reference = Reference(path="TestModel", name="TestModel")
+    dataclass = DataClass(
+        reference=reference,
+        fields=[],
+        frozen=True,
+    )
+
+    assert dataclass.frozen is True
+
+
+def test_dataclass_frozen_false_attribute() -> None:
+    """Test that frozen attribute defaults to False."""
+    reference = Reference(path="TestModel", name="TestModel")
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[],
+    )
+
+    assert dataclass.frozen is False
+
+
+def test_dataclass_kw_only_true_only() -> None:
+    """Test dataclass generation with kw_only=True only (comprehensive test)."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field1 = DataModelField(
+        name="field1",
+        data_type=DataType(type=Types.string),
+        required=True,
+    )
+    field2 = DataModelField(
+        name="field2",
+        data_type=DataType(type=Types.integer),
+        required=False,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field1, field2],
+        keyword_only=True,
+    )
+
+    rendered = dataclass.render()
+    # Should have @dataclass(kw_only=True) but not frozen=True
+    assert "@dataclass(kw_only=True)" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+    # Verify frozen attribute is False (default)
+    assert dataclass.frozen is False
+    assert dataclass.keyword_only is True
diff -pruN 0.26.4-3/tests/model/pydantic/test_base_model.py 0.34.0-1/tests/model/pydantic/test_base_model.py
--- 0.26.4-3/tests/model/pydantic/test_base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic/test_base_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,141 @@
+from __future__ import annotations
+
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.base_model import BaseModel, DataModelField
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_base_model() -> None:
+    field = DataModelField(name="a", data_type=DataType(type="str"), required=True)
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: str"
+
+
+def test_base_model_optional() -> None:
+    field = DataModelField(name="a", data_type=DataType(type="str"), default="abc", required=False)
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: Optional[str] = 'abc'"
+
+
+def test_base_model_nullable_required() -> None:
+    field = DataModelField(
+        name="a",
+        data_type=DataType(type="str"),
+        default="abc",
+        required=True,
+        nullable=True,
+    )
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: Optional[str] = Field(...)"
+
+
+def test_base_model_strict_non_nullable_required() -> None:
+    field = DataModelField(
+        name="a",
+        data_type=DataType(type="str"),
+        default="abc",
+        required=True,
+        nullable=False,
+    )
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: str"
+
+
+def test_base_model_decorator() -> None:
+    field = DataModelField(name="a", data_type=DataType(type="str"), default="abc", required=False)
+
+    base_model = BaseModel(
+        fields=[field],
+        decorators=["@validate"],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.base_class == "Base"
+    assert base_model.decorators == ["@validate"]
+    assert base_model.render() == "@validate\nclass test_model(Base):\n    a: Optional[str] = 'abc'"
+
+
+def test_base_model_get_data_type() -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
+
+
+@pytest.mark.parametrize(
+    ("kwargs", "expected"),
+    [
+        ({"required": True}, None),
+        ({"required": True, "example": "example"}, None),
+        ({"example": "example"}, None),
+        (
+            {"required": True, "default": 123, "example": "example"},
+            None,
+        ),
+        (
+            {"required": False, "default": 123, "example": "example"},
+            None,
+        ),
+        ({"description": "description"}, None),
+        ({"title": "title"}, None),
+        ({"alias": "alias"}, "Field(None, alias='alias')"),
+        ({"example": True}, None),
+        ({"examples": True}, None),
+        (
+            {
+                "example": True,
+                "description": "description",
+                "title": "title",
+                "alias": "alias",
+            },
+            "Field(None, alias='alias')",
+        ),
+        ({"examples": [1, 2, 3]}, None),
+        (
+            {"examples": {"name": "dog", "age": 1}},
+            None,
+        ),
+        ({"default": "abc", "title": "title"}, None),
+        ({"default": 123, "title": "title"}, None),
+    ],
+)
+def test_data_model_field(kwargs: dict[str, Any], expected: str | None) -> None:
+    assert DataModelField(**kwargs, data_type=DataType()).field == expected
diff -pruN 0.26.4-3/tests/model/pydantic/test_constraint.py 0.34.0-1/tests/model/pydantic/test_constraint.py
--- 0.26.4-3/tests/model/pydantic/test_constraint.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic/test_constraint.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.model.pydantic.base_model import Constraints
+from datamodel_code_generator.types import UnionIntFloat
+
+
+@pytest.mark.parametrize(
+    ("gt", "expected"),
+    [
+        (None, False),
+        (4, True),
+        (0, True),
+        (0.0, True),
+    ],
+)
+def test_constraint(gt: float | None, expected: bool) -> None:
+    constraints = Constraints()
+    constraints.gt = UnionIntFloat(gt) if gt is not None else None
+    assert constraints.has_constraints == expected
diff -pruN 0.26.4-3/tests/model/pydantic/test_custom_root_type.py 0.34.0-1/tests/model/pydantic/test_custom_root_type.py
--- 0.26.4-3/tests/model/pydantic/test_custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic/test_custom_root_type.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.custom_root_type import CustomRootType
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_custom_root_type() -> None:
+    custom_root_type = CustomRootType(
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=custom_root_type,
+        )
+    ]
+
+    assert custom_root_type.render() == ("class test_model(BaseModel):\n    __root__: Optional[str] = 'abc'")
+
+
+def test_custom_root_type_required() -> None:
+    custom_root_type = CustomRootType(
+        fields=[DataModelFieldBase(data_type=DataType(type="str"), required=True)],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(data_type=DataType(type="str"), required=True, parent=custom_root_type)
+    ]
+
+    assert custom_root_type.render() == ("class test_model(BaseModel):\n    __root__: str")
+
+
+def test_custom_root_type_decorator() -> None:
+    custom_root_type = CustomRootType(
+        fields=[DataModelFieldBase(data_type=DataType(type="str"), required=True)],
+        decorators=["@validate"],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(data_type=DataType(type="str"), required=True, parent=custom_root_type)
+    ]
+    assert custom_root_type.base_class == "Base"
+    assert custom_root_type.render() == "@validate\nclass test_model(Base):\n    __root__: str"
+
+
+def test_custom_root_type_get_data_type() -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
diff -pruN 0.26.4-3/tests/model/pydantic/test_data_class.py 0.34.0-1/tests/model/pydantic/test_data_class.py
--- 0.26.4-3/tests/model/pydantic/test_data_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic/test_data_class.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.dataclass import DataClass
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_data_class() -> None:
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model:\n    a: str"
+
+
+def test_data_class_base_class() -> None:
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model(Base):\n    a: str"
+
+
+def test_data_class_optional() -> None:
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="'abc'", required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model:\n    a: str = 'abc'"
+
+
+def test_data_class_get_data_type() -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
diff -pruN 0.26.4-3/tests/model/pydantic/test_types.py 0.34.0-1/tests/model/pydantic/test_types.py
--- 0.26.4-3/tests/model/pydantic/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic/test_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,439 @@
+from __future__ import annotations
+
+from decimal import Decimal
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.imports import Import
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_CONDECIMAL,
+    IMPORT_CONFLOAT,
+    IMPORT_CONINT,
+    IMPORT_CONSTR,
+    IMPORT_NEGATIVE_FLOAT,
+    IMPORT_NEGATIVE_INT,
+    IMPORT_NON_NEGATIVE_FLOAT,
+    IMPORT_NON_NEGATIVE_INT,
+    IMPORT_NON_POSITIVE_FLOAT,
+    IMPORT_NON_POSITIVE_INT,
+    IMPORT_POSITIVE_FLOAT,
+    IMPORT_POSITIVE_INT,
+)
+from datamodel_code_generator.model.pydantic.types import DataTypeManager
+from datamodel_code_generator.types import DataType, Types, UnionIntFloat
+
+
+@pytest.mark.parametrize(
+    ("types", "use_non_positive_negative_number_constrained_types", "params", "data_type"),
+    [
+        (Types.integer, False, {}, {"type": "int"}),
+        (
+            Types.integer,
+            False,
+            {"maximum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"lt": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"minimum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"ge": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"gt": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"multipleOf": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMinimum": 0},
+            {"type": "PositiveInt", "import_": IMPORT_POSITIVE_INT},
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMaximum": 0},
+            {"type": "NegativeInt", "import_": IMPORT_NEGATIVE_INT},
+        ),
+        (
+            Types.integer,
+            True,
+            {"minimum": 0},
+            {"type": "NonNegativeInt", "import_": IMPORT_NON_NEGATIVE_INT},
+        ),
+        (
+            Types.integer,
+            True,
+            {"maximum": 0},
+            {"type": "NonPositiveInt", "import_": IMPORT_NON_POSITIVE_INT},
+        ),
+        (
+            Types.integer,
+            False,
+            {"minimum": 0},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"ge": 0},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"maximum": 0},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"le": 0},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+    ],
+)
+def test_get_data_int_type(
+    types: Types,
+    use_non_positive_negative_number_constrained_types: bool,
+    params: dict[str, Any],
+    data_type: dict[str, Any],
+) -> None:
+    data_type_manager = DataTypeManager(
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types
+    )
+    assert (
+        data_type_manager.get_data_int_type(types, **params).dict() == data_type_manager.data_type(**data_type).dict()
+    )
+
+
+@pytest.mark.parametrize(
+    ("types", "use_non_positive_negative_number_constrained_types", "params", "data_type"),
+    [
+        (Types.float, False, {}, {"type": "float"}),
+        (
+            Types.float,
+            False,
+            {"maximum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"lt": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"minimum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"ge": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"gt": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"multipleOf": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMinimum": 0},
+            {"type": "PositiveFloat", "import_": IMPORT_POSITIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMaximum": 0},
+            {"type": "NegativeFloat", "import_": IMPORT_NEGATIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            True,
+            {"maximum": 0},
+            {"type": "NonPositiveFloat", "import_": IMPORT_NON_POSITIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            True,
+            {"minimum": 0},
+            {"type": "NonNegativeFloat", "import_": IMPORT_NON_NEGATIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            False,
+            {"maximum": 0},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"le": 0.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"minimum": 0},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"ge": 0.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+    ],
+)
+def test_get_data_float_type(
+    types: Types,
+    use_non_positive_negative_number_constrained_types: bool,
+    params: dict[str, Any],
+    data_type: dict[str, Any],
+) -> None:
+    data_type_manager = DataTypeManager(
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types
+    )
+    assert data_type_manager.get_data_float_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "params", "data_type"),
+    [
+        (
+            Types.decimal,
+            {},
+            {"type": "Decimal", "import_": Import(from_="decimal", import_="Decimal")},
+        ),
+        (
+            Types.decimal,
+            {"maximum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"lt": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"minimum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"ge": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"gt": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"multipleOf": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"minimum": UnionIntFloat(10.01)},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"ge": Decimal("10.01")},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+    ],
+)
+def test_get_data_decimal_type(types: Types, params: dict[str, Any], data_type: dict[str, Any]) -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_decimal_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "params", "data_type"),
+    [
+        (Types.string, {}, {"type": "str"}),
+        (
+            Types.string,
+            {"pattern": "^abc"},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"regex": "r'^abc'"},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+        (
+            Types.string,
+            {"minLength": 10},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"min_length": 10},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+        (
+            Types.string,
+            {"maxLength": 10},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"max_length": 10},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+    ],
+)
+def test_get_data_str_type(types: Types, params: dict[str, Any], data_type: dict[str, Any]) -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_str_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        (Types.string, {"type": "str"}),
+        (Types.integer, {"type": "int"}),
+        (Types.float, {"type": "float"}),
+        (Types.boolean, {"type": "bool"}),
+        (
+            Types.decimal,
+            {"type": "Decimal", "import_": Import(from_="decimal", import_="Decimal")},
+        ),
+    ],
+)
+def test_get_data_type(types: Types, data_type: dict[str, str]) -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(types) == data_type_manager.data_type(**data_type)
+
+
+def test_data_type_type_hint() -> None:
+    assert DataType(type="str").type_hint == "str"
+    assert DataType(type="constr", is_func=True).type_hint == "constr()"
+    assert DataType(type="constr", is_func=True, kwargs={"min_length": 10}).type_hint == "constr(min_length=10)"
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        ("string", {"type": "str"}),
+        (10, {"type": "int"}),
+        (20.3, {"type": "float"}),
+        (True, {"type": "bool"}),
+    ],
+)
+def test_get_data_type_from_value(types: Any, data_type: dict[str, str]) -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type_from_value(types) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        (
+            [1, 2, 3],
+            ("typing.List", False),
+        ),
+        (
+            {"a": 1, "b": 2, "c": 3},
+            ("typing.Dict", False),
+        ),
+        (None, ("typing.Any", False)),
+    ],
+)
+def test_get_data_type_from_full_path(types: Any, data_type: tuple[str, bool]) -> None:
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type_from_value(types) == data_type_manager.get_data_type_from_full_path(
+        *data_type
+    )
diff -pruN 0.26.4-3/tests/model/pydantic_v2/test_root_model.py 0.34.0-1/tests/model/pydantic_v2/test_root_model.py
--- 0.26.4-3/tests/model/pydantic_v2/test_root_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/pydantic_v2/test_root_model.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,67 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic_v2.root_model import RootModel
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType
+
+
+def test_root_model() -> None:
+    root_model = RootModel(
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="TestRootModel", path="test_root_model"),
+    )
+
+    assert root_model.name == "TestRootModel"
+    assert root_model.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=root_model,
+        )
+    ]
+
+    assert root_model.base_class == "RootModel"
+    assert root_model.custom_base_class is None
+    assert root_model.render() == ("class TestRootModel(RootModel[Optional[str]]):\n    root: Optional[str] = 'abc'")
+
+
+def test_root_model_custom_base_class_is_ignored() -> None:
+    """Verify that passing a custom_base_class is ignored."""
+
+    root_model = RootModel(
+        custom_base_class="test.Test",
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="TestRootModel", path="test_root_model"),
+    )
+
+    assert root_model.name == "TestRootModel"
+    assert root_model.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=root_model,
+        )
+    ]
+
+    assert root_model.base_class == "RootModel"
+    assert root_model.custom_base_class is None  # make sure it's ignored
+    assert root_model.render() == ("class TestRootModel(RootModel[Optional[str]]):\n    root: Optional[str] = 'abc'")
diff -pruN 0.26.4-3/tests/model/test_base.py 0.34.0-1/tests/model/test_base.py
--- 0.26.4-3/tests/model/test_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/test_base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,321 @@
+from __future__ import annotations
+
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.model.base import (
+    DataModel,
+    DataModelFieldBase,
+    TemplateBase,
+    get_module_path,
+    sanitize_module_name,
+)
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+class A(TemplateBase):
+    def __init__(self, path: Path) -> None:
+        self._path = path
+
+    @property
+    def template_file_path(self) -> Path:
+        return self._path
+
+    def render(self) -> str:  # noqa: PLR6301
+        return ""
+
+
+class B(DataModel):
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
+        pass
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+    TEMPLATE_FILE_PATH = ""
+
+
+class C(DataModel):
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
+        pass
+
+
+template: str = """{%- for decorator in decorators -%}
+{{ decorator }}
+{%- endfor %}
+@dataclass
+class {{ class_name }}:
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
+    {%- endif %}
+{%- endfor -%}"""
+
+
+def test_template_base() -> None:
+    with NamedTemporaryFile("w", delete=False, encoding="utf-8") as dummy_template:
+        dummy_template.write("abc")
+        dummy_template.seek(0)
+        dummy_template.close()
+        a: TemplateBase = A(Path(dummy_template.name))
+    assert str(a.template_file_path) == dummy_template.name
+    assert a._render() == "abc"
+    assert not str(a)
+
+
+def test_data_model() -> None:
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="abc", required=True)
+
+    with NamedTemporaryFile("w", delete=False, encoding="utf-8") as dummy_template:
+        dummy_template.write(template)
+        dummy_template.seek(0)
+        dummy_template.close()
+        B.TEMPLATE_FILE_PATH = dummy_template.name
+        data_model = B(
+            fields=[field],
+            decorators=["@validate"],
+            base_classes=[Reference(path="base", original_name="base", name="Base")],
+            reference=Reference(path="test_model", name="test_model"),
+        )
+
+    assert data_model.name == "test_model"
+    assert data_model.fields == [field]
+    assert data_model.decorators == ["@validate"]
+    assert data_model.base_class == "Base"
+    assert data_model.render() == "@validate\n@dataclass\nclass test_model:\n    a: str"
+
+
+def test_data_model_exception() -> None:
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="abc", required=True)
+    with pytest.raises(Exception, match="TEMPLATE_FILE_PATH is undefined"):
+        C(
+            fields=[field],
+            reference=Reference(path="abc", original_name="abc", name="abc"),
+        )
+
+
+def test_data_field() -> None:
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=True,
+        is_list=True,
+        is_union=True,
+    )
+    assert field.type_hint == "List"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=True,
+        is_list=True,
+        is_union=False,
+    )
+    assert field.type_hint == "List"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=False,
+        is_list=True,
+        is_union=True,
+    )
+    assert field.type_hint == "Optional[List]"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False, is_list=False, is_union=True)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False, is_list=False, is_union=False)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=False,
+        is_list=True,
+        is_union=False,
+    )
+    assert field.type_hint == "Optional[List]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str", is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[str]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str", is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[str]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=False)
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            type="str",
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[str]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=False,
+    )
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=False,
+    )
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            type="str",
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[str]]"
+
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            data_types=[DataType(type="str"), DataType(type="int")],
+            is_list=True,
+        ),
+        required=True,
+    )
+    assert field.type_hint == "List[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")], is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            data_types=[DataType(type="str"), DataType(type="int")],
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[Union[str, int]]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")], is_list=True),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[Union[str, int]]]"
+
+    field = DataModelFieldBase(name="a", data_type=DataType(is_list=True), required=False)
+    assert field.type_hint == "Optional[List]"
+
+
+@pytest.mark.parametrize(
+    ("name", "expected_true", "expected_false"),
+    [
+        ("array-commons.schema", "array_commons.schema", "array_commons_schema"),
+        ("123filename", "_123filename", "_123filename"),
+        ("normal_filename", "normal_filename", "normal_filename"),
+        ("file!name", "file_name", "file_name"),
+        ("", "", ""),
+    ],
+)
+@pytest.mark.parametrize("treat_dot_as_module", [True, False])
+def test_sanitize_module_name(name: str, expected_true: str, expected_false: str, treat_dot_as_module: bool) -> None:
+    expected = expected_true if treat_dot_as_module else expected_false
+    assert sanitize_module_name(name, treat_dot_as_module=treat_dot_as_module) == expected
+
+
+@pytest.mark.parametrize(
+    ("treat_dot_as_module", "expected"),
+    [
+        (True, ["inputs", "array_commons.schema", "array-commons"]),
+        (False, ["inputs", "array_commons_schema", "array-commons"]),
+    ],
+)
+def test_get_module_path_with_file_path(treat_dot_as_module: bool, expected: list[str]) -> None:
+    file_path = Path("inputs/array-commons.schema.json")
+    result = get_module_path("array-commons.schema", file_path, treat_dot_as_module=treat_dot_as_module)
+    assert result == expected
+
+
+@pytest.mark.parametrize("treat_dot_as_module", [True, False])
+def test_get_module_path_without_file_path(treat_dot_as_module: bool) -> None:
+    result = get_module_path("my_module.submodule", None, treat_dot_as_module=treat_dot_as_module)
+    expected = ["my_module"]
+    assert result == expected
+
+
+@pytest.mark.parametrize(
+    ("treat_dot_as_module", "name", "expected"),
+    [
+        (True, "a.b.c", ["a", "b"]),
+        (True, "simple", []),
+        (True, "with.dot", ["with"]),
+        (False, "a.b.c", ["a", "b"]),
+        (False, "simple", []),
+        (False, "with.dot", ["with"]),
+    ],
+)
+def test_get_module_path_without_file_path_parametrized(
+    treat_dot_as_module: bool, name: str, expected: list[str]
+) -> None:
+    result = get_module_path(name, None, treat_dot_as_module=treat_dot_as_module)
+    assert result == expected
diff -pruN 0.26.4-3/tests/model/test_dataclass.py 0.34.0-1/tests/model/test_dataclass.py
--- 0.26.4-3/tests/model/test_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/test_dataclass.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model.dataclass import DataModelField
+from datamodel_code_generator.types import DataType
+
+
+def test_data_model_field_process_const() -> None:
+    """Test process_const method functionality."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={"const": "v1"})
+
+    field.process_const()
+
+    assert field.const is True
+    assert field.nullable is False
+    assert field.data_type.literals == ["v1"]
+    assert field.default == "v1"
+
+
+def test_data_model_field_process_const_no_const() -> None:
+    """Test process_const when no const is in extras."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={})
+
+    original_nullable = field.nullable
+    original_default = field.default
+    original_const = field.const
+
+    field.process_const()
+
+    assert field.const == original_const
+    assert field.nullable == original_nullable
+    assert field.default == original_default
diff -pruN 0.26.4-3/tests/model/test_typed_dict.py 0.34.0-1/tests/model/test_typed_dict.py
--- 0.26.4-3/tests/model/test_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/model/test_typed_dict.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from datamodel_code_generator.model.typed_dict import DataModelField
+from datamodel_code_generator.types import DataType
+
+
+def test_data_model_field_process_const() -> None:
+    """Test process_const method functionality."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={"const": "v1"})
+
+    field.process_const()
+
+    assert field.const is True
+    assert field.nullable is False
+    assert field.data_type.literals == ["v1"]
+    assert field.default == "v1"
+
+
+def test_data_model_field_process_const_no_const() -> None:
+    """Test process_const when no const is in extras."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={})
+
+    original_nullable = field.nullable
+    original_default = field.default
+    original_const = field.const
+
+    field.process_const()
+
+    assert field.const == original_const
+    assert field.nullable == original_nullable
+    assert field.default == original_default
diff -pruN 0.26.4-3/tests/parser/test_base.py 0.34.0-1/tests/parser/test_base.py
--- 0.26.4-3/tests/parser/test_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/parser/test_base.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,452 @@
+from __future__ import annotations
+
+from collections import OrderedDict
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.pydantic import BaseModel, DataModelField
+from datamodel_code_generator.parser.base import (
+    Parser,
+    escape_characters,
+    exact_import,
+    relative,
+    sort_data_models,
+)
+from datamodel_code_generator.reference import Reference, snake_to_upper_camel
+from datamodel_code_generator.types import DataType
+
+
+class A(DataModel):
+    """Minimal concrete DataModel subclass used as a stand-in model type in tests."""
+
+    pass
+
+
+class B(DataModel):
+    """Minimal concrete DataModel subclass; passed as data_model_root_type below."""
+
+    pass
+
+
+class C(Parser):
+    """Concrete Parser exposing the base constructor for tests; parsing is stubbed."""
+
+    def parse_raw(self, name: str, raw: dict[str, Any]) -> None:
+        """No-op: satisfies the abstract interface; tests never feed raw data through C."""
+        pass
+
+    def parse(self) -> str:  # noqa: PLR6301
+        """Return a fixed marker string instead of generated code."""
+        return "parsed"
+
+
+def test_parser() -> None:
+    """Parser's constructor should store the model/field types it is given."""
+    # NOTE: D is defined later in this module; the forward reference is fine
+    # because it is only resolved when this test function actually runs.
+    c = C(
+        data_model_type=D,
+        data_model_root_type=B,
+        data_model_field_type=DataModelFieldBase,
+        base_class="Base",
+        source="",
+    )
+    assert c.data_model_type == D
+    assert c.data_model_root_type == B
+    assert c.data_model_field_type == DataModelFieldBase
+    assert c.base_class == "Base"
+
+
+def test_sort_data_models() -> None:
+    """Models are sorted so that referenced models precede the models using them."""
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    # A refers to itself and to C; B refers to itself; C refers to B.
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+    ]
+
+    unresolved, resolved, require_update_action_models = sort_data_models(reference)
+    expected = OrderedDict()
+    expected["B"] = reference[1]
+    expected["C"] = reference[2]
+    expected["A"] = reference[0]
+
+    assert resolved == expected
+    assert unresolved == []
+    # B and A reference themselves, so both need a post-definition update action.
+    assert require_update_action_models == ["B", "A"]
+
+
+def test_sort_data_models_unresolved() -> None:
+    """Sorting raises when the graph contains a reference with no backing model."""
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    reference_d = Reference(path="D", original_name="D", name="D")
+    reference_v = Reference(path="V", original_name="V", name="V")
+    reference_z = Reference(path="Z", original_name="Z", name="Z")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    data_type_v = DataType(reference=reference_v)
+    data_type_z = DataType(reference=reference_z)
+    # Z depends on V, but no BaseModel is ever created for V, so the sort
+    # can never resolve Z (and therefore D, which depends on Z).
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelField(data_type=data_type_c),
+                DataModelField(data_type=data_type_z),
+            ],
+            reference=reference_d,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_v)],
+            reference=reference_z,
+        ),
+    ]
+
+    with pytest.raises(Exception):  # noqa: B017, PT011
+        sort_data_models(reference)
+
+
+def test_sort_data_models_unresolved_raise_recursion_error() -> None:
+    """Same unresolvable graph as above, entered with a huge recursion_count.
+
+    NOTE(review): recursion_count presumably pre-charges the sort's internal
+    retry/recursion budget so the limit error path triggers -- confirm against
+    sort_data_models' implementation.
+    """
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    reference_d = Reference(path="D", original_name="D", name="D")
+    reference_v = Reference(path="V", original_name="V", name="V")
+    reference_z = Reference(path="Z", original_name="Z", name="Z")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    data_type_v = DataType(reference=reference_v)
+    data_type_z = DataType(reference=reference_z)
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelField(data_type=data_type_c),
+                DataModelField(data_type=data_type_z),
+            ],
+            reference=reference_d,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_v)],
+            reference=reference_z,
+        ),
+    ]
+
+    with pytest.raises(Exception):  # noqa: B017, PT011
+        sort_data_models(reference, recursion_count=100000)
+
+
+@pytest.mark.parametrize(
+    ("current_module", "reference", "val"),
+    [
+        ("", "Foo", ("", "")),
+        ("a", "a.Foo", ("", "")),
+        ("a", "a.b.Foo", (".", "b")),
+        ("a.b", "a.Foo", (".", "Foo")),
+        ("a.b.c", "a.Foo", ("..", "Foo")),
+        ("a.b.c", "Foo", ("...", "Foo")),
+    ],
+)
+def test_relative(current_module: str, reference: str, val: tuple[str, str]) -> None:
+    """relative() yields the (relative-import prefix, imported name) pair."""
+    assert relative(current_module, reference) == val
+
+
+@pytest.mark.parametrize(
+    ("from_", "import_", "name", "val"),
+    [
+        (".", "mod", "Foo", (".mod", "Foo")),
+        ("..", "mod", "Foo", ("..mod", "Foo")),
+        (".a", "mod", "Foo", (".a.mod", "Foo")),
+        ("..a", "mod", "Foo", ("..a.mod", "Foo")),
+        ("..a.b", "mod", "Foo", ("..a.b.mod", "Foo")),
+    ],
+)
+def test_exact_import(from_: str, import_: str, name: str, val: tuple[str, str]) -> None:
+    """exact_import() joins a relative prefix with a module to a full import path."""
+    assert exact_import(from_, import_, name) == val
+
+
+@pytest.mark.parametrize(
+    ("word", "expected"),
+    [
+        (
+            "_hello",
+            "_Hello",
+        ),  # In case a name starts with a underline, we should keep it.
+        ("hello_again", "HelloAgain"),  # regular snake case
+        ("hello__again", "HelloAgain"),  # handles double underscores
+        (
+            "hello___again_again",
+            "HelloAgainAgain",
+        ),  # handles double and single underscores
+        ("hello_again_", "HelloAgain"),  # handles trailing underscores
+        ("hello", "Hello"),  # no underscores
+        ("____", "_"),  # degenerate case, but this is the current expected behavior
+    ],
+)
+def test_snake_to_upper_camel(word: str, expected: str) -> None:
+    """Tests the snake_to_upper_camel conversion helper against edge cases."""
+    actual = snake_to_upper_camel(word)
+    assert actual == expected
+
+
+class D(DataModel):
+    """DataModel whose render() simply echoes back the data string it was given."""
+
+    def __init__(self, filename: str, data: str, fields: list[DataModelFieldBase]) -> None:  # noqa: ARG002
+        # filename is accepted to match the expected constructor shape but is unused.
+        super().__init__(fields=fields, reference=Reference(""))
+        self._data = data
+
+    def render(self) -> str:
+        """Return the stored data string unchanged."""
+        return self._data
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = C(
+        source="",
+        additional_imports=["collections.deque"],
+    )
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Omitting additional_imports leaves the imports container empty."""
+    new_parser = C(
+        source="",
+    )
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("input_data", "expected"),
+    [
+        (
+            {
+                ("folder1", "module1.py"): "content1",
+                ("folder1", "module2.py"): "content2",
+                ("folder1", "__init__.py"): "init_content",
+            },
+            {
+                ("folder1", "module1.py"): "content1",
+                ("folder1", "module2.py"): "content2",
+                ("folder1", "__init__.py"): "init_content",
+            },
+        ),
+        (
+            {
+                ("folder1.module", "file.py"): "content1",
+                ("folder1.module", "__init__.py"): "init_content",
+            },
+            {
+                ("folder1", "module", "file.py"): "content1",
+                ("folder1", "__init__.py"): "init_content",
+                ("folder1", "module", "__init__.py"): "init_content",
+            },
+        ),
+    ],
+)
+def test_postprocess_result_modules(input_data: Any, expected: Any) -> None:
+    """Dotted module keys are expanded into nested folders with __init__ files."""
+    # Reaches the name-mangled private method Parser.__postprocess_result_modules.
+    result = Parser._Parser__postprocess_result_modules(input_data)
+    assert result == expected
+
+
+def test_find_member_with_integer_enum() -> None:
+    """find_member resolves integer enum values given as ints or numeric strings."""
+    from datamodel_code_generator.model.enum import Enum  # noqa: PLC0415
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField  # noqa: PLC0415
+    from datamodel_code_generator.reference import Reference  # noqa: PLC0415
+    from datamodel_code_generator.types import DataType  # noqa: PLC0415
+
+    # Create test Enum with integer values
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="VALUE_1000",
+                default="1000",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_100",
+                default="100",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_0",
+                default="0",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+        ],
+    )
+
+    # Test finding members with integer values
+    assert enum.find_member(1000).field.name == "VALUE_1000"
+    assert enum.find_member(100).field.name == "VALUE_100"
+    assert enum.find_member(0).field.name == "VALUE_0"
+
+    # Test with string representations
+    assert enum.find_member("1000").field.name == "VALUE_1000"
+    assert enum.find_member("100").field.name == "VALUE_100"
+    assert enum.find_member("0").field.name == "VALUE_0"
+
+    # Test with non-existent values
+    assert enum.find_member(999) is None
+    assert enum.find_member("999") is None
+
+
+def test_find_member_with_string_enum() -> None:
+    """find_member matches string members with or without surrounding quotes."""
+    from datamodel_code_generator.model.enum import Enum  # noqa: PLC0415
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField  # noqa: PLC0415
+    from datamodel_code_generator.reference import Reference  # noqa: PLC0415
+    from datamodel_code_generator.types import DataType  # noqa: PLC0415
+
+    # Defaults are stored in quoted (repr) form, e.g. "'value_a'".
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="VALUE_A",
+                default="'value_a'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_B",
+                default="'value_b'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+        ],
+    )
+
+    member = enum.find_member("value_a")
+    assert member is not None
+    assert member.field.name == "VALUE_A"
+
+    member = enum.find_member("value_b")
+    assert member is not None
+    assert member.field.name == "VALUE_B"
+
+    # Lookup also works when the query keeps the quotes.
+    member = enum.find_member("'value_a'")
+    assert member is not None
+    assert member.field.name == "VALUE_A"
+
+
+def test_find_member_with_mixed_enum() -> None:
+    """find_member handles enums that mix int and str members in one enum."""
+    from datamodel_code_generator.model.enum import Enum  # noqa: PLC0415
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField  # noqa: PLC0415
+    from datamodel_code_generator.reference import Reference  # noqa: PLC0415
+    from datamodel_code_generator.types import DataType  # noqa: PLC0415
+
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="INT_VALUE",
+                default="100",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="STR_VALUE",
+                default="'value_a'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+        ],
+    )
+
+    # Integer member: found via int or its string representation.
+    member = enum.find_member(100)
+    assert member is not None
+    assert member.field.name == "INT_VALUE"
+
+    member = enum.find_member("100")
+    assert member is not None
+    assert member.field.name == "INT_VALUE"
+
+    # String member: found with or without surrounding quotes.
+    member = enum.find_member("value_a")
+    assert member is not None
+    assert member.field.name == "STR_VALUE"
+
+    member = enum.find_member("'value_a'")
+    assert member is not None
+    assert member.field.name == "STR_VALUE"
+
+
+@pytest.fixture
+def escape_map() -> dict[str, str]:
+    """Map raw control/quote characters to their escaped spellings.
+
+    NOTE(review): no test in this module currently requests this fixture --
+    the parametrize table in test_character_escaping duplicates the same
+    pairs. Consider deleting one of the two.
+    """
+    return {
+        "\u0000": r"\x00",  # Null byte
+        "'": r"\'",
+        "\b": r"\b",
+        "\f": r"\f",
+        "\n": r"\n",
+        "\r": r"\r",
+        "\t": r"\t",
+        "\\": r"\\",
+    }
+
+
+@pytest.mark.parametrize(
+    ("input_str", "expected"),
+    [
+        ("\u0000", r"\x00"),  # Test null byte
+        ("'", r"\'"),  # Test single quote
+        ("\b", r"\b"),  # Test backspace
+        ("\f", r"\f"),  # Test form feed
+        ("\n", r"\n"),  # Test newline
+        ("\r", r"\r"),  # Test carriage return
+        ("\t", r"\t"),  # Test tab
+        ("\\", r"\\"),  # Test backslash
+    ],
+)
+def test_character_escaping(input_str: str, expected: str) -> None:
+    """The escape_characters translate table escapes control chars and quotes."""
+    assert input_str.translate(escape_characters) == expected
+
+
+@pytest.mark.parametrize("flag", [True, False])
+def test_use_non_positive_negative_number_constrained_types(flag: bool) -> None:
+    """The flag is forwarded unchanged to the parser's data-type manager."""
+    instance = C(source="", use_non_positive_negative_number_constrained_types=flag)
+
+    assert instance.data_type_manager.use_non_positive_negative_number_constrained_types == flag
diff -pruN 0.26.4-3/tests/parser/test_graphql.py 0.34.0-1/tests/parser/test_graphql.py
--- 0.26.4-3/tests/parser/test_graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/parser/test_graphql.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from freezegun import freeze_time
+
+from datamodel_code_generator.__main__ import Exit, main
+from tests.main.test_main_general import DATA_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+GRAPHQL_DATA_PATH: Path = DATA_PATH / "graphql"
+EXPECTED_GRAPHQL_PATH: Path = DATA_PATH / "expected" / "parser" / "graphql"
+
+
+@freeze_time("2019-07-26")
+def test_graphql_field_enum(tmp_path: Path) -> None:
+    """GraphQL enum field defaults should honor --set-default-enum-member."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "field-default-enum.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+        "--set-default-enum-member",
+    ])
+    assert return_code == Exit.OK
+    # NOTE(review): the generated output is read with an explicit encoding but
+    # the expected file is not -- consider encoding="utf-8" on both reads.
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_GRAPHQL_PATH / "field-default-enum.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_graphql_union_aliased_bug(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "union-aliased-bug.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+    ])
+    assert return_code == Exit.OK
+    actual = output_file.read_text(encoding="utf-8").rstrip()
+    expected = (EXPECTED_GRAPHQL_PATH / "union-aliased-bug.py").read_text().rstrip()
+    if actual != expected:
+        pass
+    assert actual == expected
+
+
+@freeze_time("2019-07-26")
+def test_graphql_union_commented(tmp_path: Path) -> None:
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(GRAPHQL_DATA_PATH / "union-commented.graphql"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "graphql",
+    ])
+    assert return_code == Exit.OK
+    actual = output_file.read_text(encoding="utf-8").rstrip()
+    expected = (EXPECTED_GRAPHQL_PATH / "union-commented.py").read_text().rstrip()
+    if actual != expected:
+        pass
+    assert actual == expected
diff -pruN 0.26.4-3/tests/parser/test_jsonschema.py 0.34.0-1/tests/parser/test_jsonschema.py
--- 0.26.4-3/tests/parser/test_jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/parser/test_jsonschema.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,512 @@
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Optional, Union
+from unittest.mock import call
+
+import pydantic
+import pytest
+import yaml
+
+from datamodel_code_generator.imports import Import
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.parser.base import dump_templates
+from datamodel_code_generator.parser.jsonschema import (
+    JsonSchemaObject,
+    JsonSchemaParser,
+    get_model_by_path,
+)
+from datamodel_code_generator.types import DataType
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+DATA_PATH: Path = Path(__file__).parents[1] / "data" / "jsonschema"
+
+EXPECTED_JSONSCHEMA_PATH = Path(__file__).parents[1] / "data" / "expected" / "parser" / "jsonschema"
+
+
+@pytest.mark.parametrize(
+    ("schema", "path", "model"),
+    [
+        ({"foo": "bar"}, None, {"foo": "bar"}),
+        ({"a": {"foo": "bar"}}, "a", {"foo": "bar"}),
+        ({"a": {"b": {"foo": "bar"}}}, "a/b", {"foo": "bar"}),
+        ({"a": {"b": {"c": {"foo": "bar"}}}}, "a/b", {"c": {"foo": "bar"}}),
+        ({"a": {"b": {"c": {"foo": "bar"}}}}, "a/b/c", {"foo": "bar"}),
+    ],
+)
+def test_get_model_by_path(schema: dict, path: str, model: dict) -> None:
+    """get_model_by_path walks nested dict keys; an empty path returns the root."""
+    assert get_model_by_path(schema, path.split("/") if path else []) == model
+
+
+def test_json_schema_object_ref_url_json(mocker: MockerFixture) -> None:
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"$ref": "https://example.com/person.schema.json#/definitions/User"})
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = json.dumps(
+        {
+            "$id": "https://example.com/person.schema.json",
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "definitions": {
+                "User": {
+                    "type": "object",
+                    "properties": {
+                        "name": {
+                            "type": "string",
+                        }
+                    },
+                }
+            },
+        },
+    )
+
+    parser.parse_ref(obj, ["Model"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class User(BaseModel):
+    name: Optional[str] = None"""
+    )
+    parser.parse_ref(obj, ["Model"])
+    mock_get.assert_has_calls([
+        call(
+            "https://example.com/person.schema.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+def test_json_schema_object_ref_url_yaml(mocker: MockerFixture) -> None:
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"$ref": "https://example.org/schema.yaml#/definitions/User"})
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = yaml.safe_dump(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_ref(obj, ["User"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])"""
+    )
+    parser.parse_ref(obj, [])
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.yaml",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+def test_json_schema_object_cached_ref_url_yaml(mocker: MockerFixture) -> None:
+    parser = JsonSchemaParser("")
+
+    obj = JsonSchemaObject.parse_obj({
+        "type": "object",
+        "properties": {
+            "pet": {"$ref": "https://example.org/schema.yaml#/definitions/Pet"},
+            "user": {"$ref": "https://example.org/schema.yaml#/definitions/User"},
+        },
+    })
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = yaml.safe_dump(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_ref(obj, [])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)"""
+    )
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.yaml",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+def test_json_schema_ref_url_json(mocker: MockerFixture) -> None:
+    parser = JsonSchemaParser("")
+    obj = {
+        "type": "object",
+        "properties": {"user": {"$ref": "https://example.org/schema.json#/definitions/User"}},
+    }
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = json.dumps(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_raw_obj("Model", obj, ["Model"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Model(BaseModel):
+    user: Optional[User] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])"""
+    )
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.json",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "Person",
+                "type": "object",
+                "properties": {
+                    "firstName": {
+                        "type": "string",
+                        "description": "The person's first name.",
+                    },
+                    "lastName": {
+                        "type": "string",
+                        "description": "The person's last name.",
+                    },
+                    "age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                    },
+                },
+            },
+            """class Person(BaseModel):
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
+    age: Optional[conint(ge=0)] = None""",
+        ),
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "person-object",
+                "type": "object",
+                "properties": {
+                    "name": {
+                        "type": "string",
+                        "description": "The person's name.",
+                    },
+                    "home-address": {
+                        "$ref": "#/definitions/home-address",
+                        "description": "The person's home address.",
+                    },
+                },
+                "definitions": {
+                    "home-address": {
+                        "type": "object",
+                        "properties": {
+                            "street-address": {"type": "string"},
+                            "city": {"type": "string"},
+                            "state": {"type": "string"},
+                        },
+                        "required": ["street_address", "city", "state"],
+                    }
+                },
+            },
+            """class Person(BaseModel):
+    name: Optional[str] = None
+    home_address: Optional[HomeAddress] = None""",
+        ),
+    ],
+)
+def test_parse_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """parse_object renders the expected class source, snake_casing hyphenated names."""
+    parser = JsonSchemaParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+    parser.parse_object("Person", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "AnyJson",
+                "description": "This field accepts any object",
+                "discriminator": "type",
+            },
+            """class AnyObject(BaseModel):
+    __root__: Any = Field(..., description='This field accepts any object', discriminator='type', title='AnyJson')""",
+        )
+    ],
+)
+def test_parse_any_root_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """A typeless root schema becomes a __root__: Any model keeping its metadata."""
+    parser = JsonSchemaParser("")
+    parser.parse_root_type("AnyObject", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            yaml.safe_load((DATA_PATH / "oneof.json").read_text()),
+            (DATA_PATH / "oneof.json.snapshot").read_text(),
+        )
+    ],
+)
+def test_parse_one_of_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """A oneOf schema renders to the snapshot output.
+
+    NOTE(review): "onOfObject" looks like a typo for "oneOfObject", but the
+    name feeds the generated model names, so fixing it requires regenerating
+    the snapshot file as well.
+    """
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("onOfObject", source_obj, [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "defaults",
+                "type": "object",
+                "properties": {
+                    "string": {
+                        "type": "string",
+                        "default": "default string",
+                    },
+                    "string_on_field": {
+                        "type": "string",
+                        "default": "default string",
+                        "description": "description",
+                    },
+                    "number": {"type": "number", "default": 123},
+                    "number_on_field": {
+                        "type": "number",
+                        "default": 123,
+                        "description": "description",
+                    },
+                    "number_array": {"type": "array", "default": [1, 2, 3]},
+                    "string_array": {"type": "array", "default": ["a", "b", "c"]},
+                    "object": {"type": "object", "default": {"key": "value"}},
+                },
+            },
+            """class Defaults(BaseModel):
+    string: Optional[str] = 'default string'
+    string_on_field: Optional[str] = Field('default string', description='description')
+    number: Optional[float] = 123
+    number_on_field: Optional[float] = Field(123, description='description')
+    number_array: Optional[List] = [1, 2, 3]
+    string_array: Optional[List] = ['a', 'b', 'c']
+    object: Optional[Dict[str, Any]] = {'key': 'value'}""",
+        )
+    ],
+)
+def test_parse_default(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Schema defaults render as field defaults, wrapped in Field() when described."""
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("Defaults", source_obj, [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+def test_parse_array_schema() -> None:
+    """A boolean property schema (name: True) becomes Optional[Any].
+
+    NOTE(review): the test name mentions "array" but the schema under test is
+    an object with a boolean subschema -- consider renaming.
+    """
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("schema", {"type": "object", "properties": {"name": True}}, [])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Schema(BaseModel):
+    name: Optional[Any] = None"""
+    )
+
+
+def test_parse_nested_array(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Nested array schemas render to the committed snapshot."""
+    # Run from a temp cwd so any files the parser resolves relatively stay sandboxed.
+    monkeypatch.chdir(tmp_path)
+    parser = JsonSchemaParser(
+        DATA_PATH / "nested_array.json",
+        data_model_field_type=DataModelFieldBase,
+    )
+    parser.parse()
+    assert dump_templates(list(parser.results)) == (DATA_PATH / "nested_array.json.snapshot").read_text()
+
+
+@pytest.mark.parametrize(
+    ("schema_type", "schema_format", "result_type", "from_", "import_", "use_pendulum"),
+    [
+        ("integer", "int32", "int", None, None, False),
+        ("integer", "int64", "int", None, None, False),
+        ("integer", "date-time", "datetime", "datetime", "datetime", False),
+        ("integer", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("integer", "unix-time", "int", None, None, False),
+        ("number", "float", "float", None, None, False),
+        ("number", "double", "float", None, None, False),
+        ("number", "time", "time", "datetime", "time", False),
+        ("number", "time", "Time", "pendulum", "Time", True),
+        ("number", "date-time", "datetime", "datetime", "datetime", False),
+        ("number", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("string", None, "str", None, None, False),
+        ("string", "byte", "str", None, None, False),
+        ("string", "binary", "bytes", None, None, False),
+        ("boolean", None, "bool", None, None, False),
+        ("string", "date", "date", "datetime", "date", False),
+        ("string", "date", "Date", "pendulum", "Date", True),
+        ("string", "date-time", "datetime", "datetime", "datetime", False),
+        ("string", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("string", "duration", "timedelta", "datetime", "timedelta", False),
+        ("string", "duration", "Duration", "pendulum", "Duration", True),
+        ("string", "path", "Path", "pathlib", "Path", False),
+        ("string", "password", "SecretStr", "pydantic", "SecretStr", False),
+        ("string", "email", "EmailStr", "pydantic", "EmailStr", False),
+        ("string", "uri", "AnyUrl", "pydantic", "AnyUrl", False),
+        ("string", "uri-reference", "str", None, None, False),
+        ("string", "uuid", "UUID", "uuid", "UUID", False),
+        ("string", "uuid1", "UUID1", "pydantic", "UUID1", False),
+        ("string", "uuid2", "UUID2", "pydantic", "UUID2", False),
+        ("string", "uuid3", "UUID3", "pydantic", "UUID3", False),
+        ("string", "uuid4", "UUID4", "pydantic", "UUID4", False),
+        ("string", "uuid5", "UUID5", "pydantic", "UUID5", False),
+        ("string", "ipv4", "IPv4Address", "ipaddress", "IPv4Address", False),
+        ("string", "ipv6", "IPv6Address", "ipaddress", "IPv6Address", False),
+        ("string", "unknown-type", "str", None, None, False),
+    ],
+)
+def test_get_data_type(
+    schema_type: str,
+    schema_format: str,
+    result_type: str,
+    from_: str | None,
+    import_: str | None,
+    use_pendulum: bool,
+) -> None:
+    if from_ and import_:
+        import_: Import | None = Import(from_=from_, import_=import_)
+    else:
+        import_ = None
+
+    parser = JsonSchemaParser("", use_pendulum=use_pendulum)
+    assert (
+        parser.get_data_type(JsonSchemaObject(type=schema_type, format=schema_format)).dict()
+        == DataType(type=result_type, import_=import_).dict()
+    )
+
+
+@pytest.mark.parametrize(
+    ("schema_types", "result_types"),
+    [
+        (["integer", "number"], ["int", "float"]),
+        (["integer", "null"], ["int"]),
+    ],
+)
+def test_get_data_type_array(schema_types: list[str], result_types: list[str]) -> None:
+    parser = JsonSchemaParser("")
+    assert parser.get_data_type(JsonSchemaObject(type=schema_types)) == parser.data_type(
+        data_types=[
+            parser.data_type(
+                type=r,
+            )
+            for r in result_types
+        ],
+        is_optional="null" in schema_types,
+    )
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = JsonSchemaParser(source="", additional_imports=["collections.deque"])
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Test that omitting additional imports leaves the imports container empty."""
+    new_parser = JsonSchemaParser(
+        source="",
+    )
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "Person",
+                "type": "object",
+                "properties": {
+                    "firstName": {
+                        "type": "string",
+                        "description": "The person's first name.",
+                        "alt_type": "integer",
+                    },
+                    "lastName": {
+                        "type": "string",
+                        "description": "The person's last name.",
+                        "alt_type": "integer",
+                    },
+                    "age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                        "alt_type": "number",
+                    },
+                    "real_age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                    },
+                },
+            },
+            """class Person(BaseModel):
+    firstName: Optional[int] = None
+    lastName: Optional[int] = None
+    age: Optional[confloat(ge=0.0)] = None
+    real_age: Optional[conint(ge=0)] = None""",
+        ),
+    ],
+)
+@pytest.mark.skipif(pydantic.VERSION < "2.0.0", reason="Require Pydantic version 2.0.0 or later ")
+def test_json_schema_parser_extension(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """
+    Contrived example to extend the JsonSchemaParser to support an alt_type, which
+    replaces the type if present.
+    """
+
+    class AltJsonSchemaObject(JsonSchemaObject):
+        properties: Optional[dict[str, Union[AltJsonSchemaObject, bool]]] = None  # noqa: UP007, UP045
+        alt_type: Optional[str] = None  # noqa: UP045
+
+        def model_post_init(self, context: Any) -> None:  # noqa: ARG002
+            if self.alt_type:
+                self.type = self.alt_type
+
+    class AltJsonSchemaParser(JsonSchemaParser):
+        SCHEMA_OBJECT_TYPE = AltJsonSchemaObject
+
+    parser = AltJsonSchemaParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+    parser.parse_object("Person", AltJsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
diff -pruN 0.26.4-3/tests/parser/test_openapi.py 0.34.0-1/tests/parser/test_openapi.py
--- 0.26.4-3/tests/parser/test_openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/parser/test_openapi.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,898 @@
+from __future__ import annotations
+
+import os
+import platform
+from pathlib import Path
+from typing import Any
+
+import black
+import pydantic
+import pytest
+from packaging import version
+
+from datamodel_code_generator import OpenAPIScope, PythonVersionMin
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataModelField
+from datamodel_code_generator.parser.base import dump_templates
+from datamodel_code_generator.parser.jsonschema import JsonSchemaObject
+from datamodel_code_generator.parser.openapi import (
+    MediaObject,
+    OpenAPIParser,
+    ParameterObject,
+    RequestBodyObject,
+    ResponseObject,
+)
+
+DATA_PATH: Path = Path(__file__).parents[1] / "data" / "openapi"
+
+EXPECTED_OPEN_API_PATH = Path(__file__).parents[1] / "data" / "expected" / "parser" / "openapi"
+
+
+def get_expected_file(
+    test_name: str,
+    with_import: bool,
+    format_: bool,
+    base_class: str | None = None,
+    prefix: str | None = None,
+) -> Path:
+    params: list[str] = []
+    if with_import:
+        params.append("with_import")
+    if format_:
+        params.append("format")
+    if base_class:
+        params.append(base_class)
+    file_name = "_".join(params) or "output"
+
+    return EXPECTED_OPEN_API_PATH / test_name / (prefix or "") / f"{file_name}.py"
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {"properties": {"name": {"type": "string"}}},
+            """class Pets(BaseModel):
+    name: Optional[str] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class Kind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    kind: Optional[Kind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "Kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class Kind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    Kind: Optional[Kind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "pet_kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class PetKind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    pet_kind: Optional[PetKind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "kind": {
+                        "type": "array",
+                        "items": [
+                            {
+                                "type": "object",
+                                "properties": {"name": {"type": "string"}},
+                            }
+                        ],
+                    }
+                }
+            },
+            """class KindItem(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    kind: Optional[List[KindItem]] = None""",
+        ),
+        (
+            {"properties": {"kind": {"type": "array", "items": []}}},
+            """class Pets(BaseModel):
+    kind: Optional[List] = None""",
+        ),
+    ],
+)
+def test_parse_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    parser = OpenAPIParser("")
+    parser.parse_object("Pets", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "type": "array",
+                "items": {"type": "object", "properties": {"name": {"type": "string"}}},
+            },
+            """class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]""",
+        ),
+        (
+            {
+                "type": "array",
+                "items": [{"type": "object", "properties": {"name": {"type": "string"}}}],
+            },
+            """class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]""",
+        ),
+        (
+            {
+                "type": "array",
+                "items": {},
+            },
+            """class Pets(BaseModel):
+    __root__: List""",
+        ),
+    ],
+)
+def test_parse_array(source_obj: dict[str, Any], generated_classes: str) -> None:
+    parser = OpenAPIParser("")
+    parser.parse_array("Pets", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("with_import", "format_", "base_class"),
+    [
+        (
+            True,
+            True,
+            None,
+        ),
+        (
+            False,
+            True,
+            None,
+        ),
+        (
+            True,
+            False,
+            None,
+        ),
+        (True, True, "custom_module.Base"),
+    ],
+)
+def test_openapi_parser_parse(with_import: bool, format_: bool, base_class: str | None) -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "api.yaml"),
+        base_class=base_class,
+    )
+    expected_file = get_expected_file("openapi_parser_parse", with_import, format_, base_class)
+    assert (
+        parser.parse(with_import=with_import, format_=format_, settings_path=DATA_PATH.parent)
+        == expected_file.read_text()
+    )
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {"type": "string", "nullable": True},
+            """class Name(BaseModel):
+    __root__: Optional[str] = None""",
+        ),
+        (
+            {"type": "string", "nullable": False},
+            """class Name(BaseModel):
+    __root__: str""",
+        ),
+    ],
+)
+def test_parse_root_type(source_obj: dict[str, Any], generated_classes: str) -> None:
+    parser = OpenAPIParser("")
+    parser.parse_root_type("Name", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+def test_openapi_parser_parse_duplicate_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "duplicate_models.yaml"),
+    )
+    assert (
+        parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_duplicate_models" / "output.py").read_text()
+    )
+
+
+def test_openapi_parser_parse_duplicate_model_with_simplify(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    raw = Path(DATA_PATH / "duplicate_model_simplify.yaml")
+    parser = OpenAPIParser(raw)
+    expected = (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_duplicate_models_simplify" / "output.py").read_text()
+    got = parser.parse()
+    assert got == expected
+
+
+def test_openapi_parser_parse_resolved_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "resolved_models.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_resolved_models" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_lazy_resolved_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "lazy_resolved_models.yaml"),
+    )
+    assert (
+        parser.parse()
+        == """from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+    event: Optional[Event] = None
+
+
+class Events(BaseModel):
+    __root__: List[Event]
+
+
+class Results(BaseModel):
+    envets: Optional[List[Events]] = None
+    event: Optional[List[Event]] = None
+"""
+    )
+
+
+def test_openapi_parser_parse_x_enum_varnames(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "x_enum_varnames.yaml"),
+    )
+    assert (
+        parser.parse()
+        == """from __future__ import annotations
+
+from enum import Enum
+
+
+class String(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class UnknownTypeString(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class NamedString(Enum):
+    EQ = '='
+    NE = '!='
+    GT = '>'
+    LT = '<'
+    GE = '>='
+    LE = '<='
+
+
+class NamedNumber(Enum):
+    one = 1
+    two = 2
+    three = 3
+
+
+class Number(Enum):
+    number_1 = 1
+    number_2 = 2
+    number_3 = 3
+
+
+class UnknownTypeNumber(Enum):
+    int_1 = 1
+    int_2 = 2
+    int_3 = 3
+"""
+    )
+
+
+@pytest.mark.skipif(pydantic.VERSION < "1.9.0", reason="Require Pydantic version 1.9.0 or later ")
+def test_openapi_parser_parse_enum_models() -> None:
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "enum_models.yaml").read_text(encoding="utf-8"),
+        target_python_version=PythonVersionMin,
+    )
+    expected_dir = EXPECTED_OPEN_API_PATH / "openapi_parser_parse_enum_models"
+    assert parser.parse() == (expected_dir / "output.py").read_text()
+
+
+def test_openapi_parser_parse_anyof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "anyof.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_anyof" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_anyof_required(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "anyof_required.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_anyof_required" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_nested_anyof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "nested_anyof.yaml").read_text(encoding="utf-8"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_nested_anyof" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_oneof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "oneof.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_oneof" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_nested_oneof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "nested_oneof.yaml").read_text(encoding="utf-8"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_nested_oneof" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_allof_ref(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof_same_prefix_with_ref.yaml"),
+    )
+    assert (
+        parser.parse()
+        == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof_same_prefix_with_ref" / "output.py").read_text()
+    )
+
+
+def test_openapi_parser_parse_allof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof" / "output.py").read_text()
+
+
+def test_openapi_parser_parse_allof_required_fields(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof_required_fields.yaml"),
+    )
+    assert (
+        parser.parse()
+        == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof_required_fields" / "output.py").read_text()
+    )
+
+
+def test_openapi_parser_parse_alias(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "alias.yaml"),
+    )
+    delimiter = "\\" if platform.system() == "Windows" else "/"
+    results = {delimiter.join(p): r for p, r in parser.parse().items()}
+    openapi_parser_parse_alias_dir = EXPECTED_OPEN_API_PATH / "openapi_parser_parse_alias"
+    for path in openapi_parser_parse_alias_dir.rglob("*.py"):
+        key = str(path.relative_to(openapi_parser_parse_alias_dir))
+        assert results.pop(key).body == path.read_text()
+
+
+def test_openapi_parser_parse_modular(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(Path(DATA_PATH / "modular.yaml"), data_model_field_type=DataModelFieldBase)
+    modules = parser.parse()
+    main_modular_dir = EXPECTED_OPEN_API_PATH / "openapi_parser_parse_modular"
+
+    for paths, result in modules.items():
+        expected = main_modular_dir.joinpath(*paths).read_text()
+        assert result.body == expected
+
+
+@pytest.mark.parametrize(
+    ("with_import", "format_", "base_class"),
+    [
+        (
+            True,
+            True,
+            None,
+        ),
+        (
+            False,
+            True,
+            None,
+        ),
+        (
+            True,
+            False,
+            None,
+        ),
+        (
+            True,
+            True,
+            "custom_module.Base",
+        ),
+    ],
+)
+def test_openapi_parser_parse_additional_properties(with_import: bool, format_: bool, base_class: str | None) -> None:
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "additional_properties.yaml").read_text(encoding="utf-8"),
+        base_class=base_class,
+        data_model_field_type=DataModelFieldBase,
+    )
+
+    assert (
+        parser.parse(with_import=with_import, format_=format_, settings_path=DATA_PATH.parent)
+        == get_expected_file(
+            "openapi_parser_parse_additional_properties",
+            with_import,
+            format_,
+            base_class,
+        ).read_text()
+    )
+
+
+def test_openapi_parser_parse_array_enum(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=Path(DATA_PATH / "array_enum.yaml"))
+    expected_file = get_expected_file("openapi_parser_parse_array_enum", True, True)
+    assert parser.parse() == expected_file.read_text()
+
+
+def test_openapi_parser_parse_remote_ref(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=(DATA_PATH / "refs.yaml").read_text(),
+        http_ignore_tls=bool(os.environ.get("HTTP_IGNORE_TLS")),
+    )
+    expected_file = get_expected_file("openapi_parser_parse_remote_ref", True, True)
+
+    assert parser.parse() == expected_file.read_text()
+
+
+def test_openapi_parser_parse_required_null(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=Path(DATA_PATH / "required_null.yaml"))
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_required_null" / "output.py").read_text()
+
+
+def test_openapi_model_resolver(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=(DATA_PATH / "api.yaml"))
+    parser.parse()
+
+    references = {
+        k: v.dict(
+            exclude={"source", "module_name", "actual_module_name"},
+        )
+        for k, v in parser.model_resolver.references.items()
+    }
+    assert references == {
+        "api.yaml#/components/schemas/Error": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Error",
+            "original_name": "Error",
+            "path": "api.yaml#/components/schemas/Error",
+        },
+        "api.yaml#/components/schemas/Event": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Event",
+            "original_name": "Event",
+            "path": "api.yaml#/components/schemas/Event",
+        },
+        "api.yaml#/components/schemas/Id": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Id",
+            "original_name": "Id",
+            "path": "api.yaml#/components/schemas/Id",
+        },
+        "api.yaml#/components/schemas/Pet": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Pet",
+            "original_name": "Pet",
+            "path": "api.yaml#/components/schemas/Pet",
+        },
+        "api.yaml#/components/schemas/Pets": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Pets",
+            "original_name": "Pets",
+            "path": "api.yaml#/components/schemas/Pets",
+        },
+        "api.yaml#/components/schemas/Result": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Result",
+            "original_name": "Result",
+            "path": "api.yaml#/components/schemas/Result",
+        },
+        "api.yaml#/components/schemas/Rules": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Rules",
+            "original_name": "Rules",
+            "path": "api.yaml#/components/schemas/Rules",
+        },
+        "api.yaml#/components/schemas/Users": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Users",
+            "original_name": "Users",
+            "path": "api.yaml#/components/schemas/Users",
+        },
+        "api.yaml#/components/schemas/Users/Users/0#-datamodel-code-generator-#-object-#-special-#": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "User",
+            "original_name": "Users",
+            "path": "api.yaml#/components/schemas/Users/Users/0#-datamodel-code-generator-#-object-#-special-#",
+        },
+        "api.yaml#/components/schemas/apis": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Apis",
+            "original_name": "apis",
+            "path": "api.yaml#/components/schemas/apis",
+        },
+        "api.yaml#/components/schemas/apis/apis/0#-datamodel-code-generator-#-object-#-special-#": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Api",
+            "original_name": "apis",
+            "path": "api.yaml#/components/schemas/apis/apis/0#-datamodel-code-generator-#-object-#-special-#",
+        },
+    }
+
+
+def test_openapi_parser_parse_any(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "any.yaml"),
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_parse_any" / "output.py").read_text()
+
+
+def test_openapi_parser_responses_without_content(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "body_and_parameters.yaml"),
+        openapi_scopes=[OpenAPIScope.Paths],
+        allow_responses_without_content=True,
+    )
+    assert (
+        parser.parse()
+        == (EXPECTED_OPEN_API_PATH / "openapi_parser_responses_without_content" / "output.py").read_text()
+    )
+
+
+def test_openapi_parser_responses_with_tag(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "body_and_parameters.yaml"),
+        openapi_scopes=[OpenAPIScope.Tags, OpenAPIScope.Schemas, OpenAPIScope.Paths],
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_responses_with_tag" / "output.py").read_text()
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_openapi_parser_with_query_parameters() -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "query_parameters.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+    )
+    assert parser.parse() == (EXPECTED_OPEN_API_PATH / "openapi_parser_with_query_parameters" / "output.py").read_text()
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_openapi_parser_with_include_path_parameters() -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "query_parameters.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+        include_path_parameters=True,
+    )
+    assert (
+        parser.parse()
+        == (EXPECTED_OPEN_API_PATH / "openapi_parser_with_query_parameters" / "with_path_params.py").read_text()
+    )
+
+
+def test_parse_all_parameters_duplicate_names_exception() -> None:
+    parser = OpenAPIParser("", include_path_parameters=True)
+    parameters = [
+        ParameterObject.parse_obj({"name": "duplicate_param", "in": "path", "schema": {"type": "string"}}),
+        ParameterObject.parse_obj({"name": "duplicate_param", "in": "query", "schema": {"type": "integer"}}),
+    ]
+
+    with pytest.raises(Exception) as exc_info:  # noqa: PT011
+        parser.parse_all_parameters("TestModel", parameters, ["test", "path"])
+
+    assert "Parameter name 'duplicate_param' is used more than once." in str(exc_info.value)
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("2.9.0"),
+    reason="Require Pydantic version 2.9.0 or later",
+)
+def test_openapi_parser_array_called_fields_with_one_of_items() -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelField,
+        source=Path(DATA_PATH / "array_called_fields_with_oneOf_items.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+        field_constraints=True,
+    )
+    assert (
+        parser.parse()
+        == (
+            EXPECTED_OPEN_API_PATH / "openapi_parser_parse_array_called_fields_with_oneOf_items" / "output.py"
+        ).read_text()
+    )
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = OpenAPIParser(source="", additional_imports=["collections.deque"])
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Test that omitting additional imports leaves the imports container empty."""
+    new_parser = OpenAPIParser(
+        source="",
+    )
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("request_body_data", "expected_type_hints"),
+    [
+        pytest.param(
+            {"application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}}},
+            {"application/json": "TestRequest"},
+            id="object_with_properties",
+        ),
+        pytest.param(
+            {
+                "application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}},
+                "text/plain": {"schema": {"type": "string"}},
+            },
+            {"application/json": "TestRequest", "text/plain": "str"},
+            id="multiple_media_types",
+        ),
+        pytest.param(
+            {"application/json": {"schema": {"$ref": "#/components/schemas/RequestRef"}}},
+            {"application/json": "RequestRef"},
+            id="schema_reference",
+        ),
+        pytest.param(
+            {"application/json": {}},  # MediaObject with no schema
+            {},  # Should result in empty dict since no schema to process
+            id="missing_schema",
+        ),
+    ],
+)
+def test_parse_request_body_return(request_body_data: dict[str, Any], expected_type_hints: dict[str, str]) -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+        use_standard_collections=True,
+    )
+    result = parser.parse_request_body(
+        "TestRequest",
+        RequestBodyObject(
+            content={
+                media_type: MediaObject.parse_obj(media_data) for media_type, media_data in request_body_data.items()
+            }
+        ),
+        ["test", "path"],
+    )
+
+    assert isinstance(result, dict)
+    assert len(result) == len(expected_type_hints)
+    for media_type, expected_hint in expected_type_hints.items():
+        assert media_type in result
+        assert result[media_type].type_hint == expected_hint
+
+
+@pytest.mark.parametrize(
+    ("parameters_data", "expected_type_hint"),
+    [
+        pytest.param([], None, id="no_parameters"),
+        pytest.param(
+            [{"name": "search", "in": "query", "required": False, "schema": {"type": "string"}}],
+            "TestParametersQuery",
+            id="with_query_parameters",
+        ),
+        pytest.param(
+            [{"name": "userId", "in": "path", "required": True, "schema": {"type": "string"}}],
+            None,
+            id="path_parameter_only",
+        ),
+    ],
+)
+def test_parse_all_parameters_return(parameters_data: list[dict[str, Any]], expected_type_hint: str | None) -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+        openapi_scopes=[OpenAPIScope.Parameters],
+    )
+    result = parser.parse_all_parameters(
+        "TestParametersQuery",
+        [ParameterObject.parse_obj(param_data) for param_data in parameters_data],
+        ["test", "path"],
+    )
+    if expected_type_hint is None:
+        assert result is None
+    else:
+        assert result is not None
+        assert result.type_hint == expected_type_hint
+
+
+@pytest.mark.parametrize(
+    ("responses_data", "expected_type_hints"),
+    [
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {"application/json": {"schema": {"type": "string"}}},
+                }
+            },
+            {"200": {"application/json": "str"}},
+            id="simple_response_with_schema",
+        ),
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {
+                        "application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}},
+                        "text/plain": {"schema": {"type": "string"}},
+                    },
+                },
+                "400": {
+                    "description": "Bad Request",
+                    "content": {"text/plain": {"schema": {"type": "string"}}},
+                },
+            },
+            {"200": {"application/json": "TestResponse", "text/plain": "str"}, "400": {"text/plain": "str"}},
+            id="multiple_status_codes_and_content_types",
+        ),
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {"application/json": {}},  # Content but no schema
+                }
+            },
+            {},  # Should skip since no schema in content
+            id="response_with_no_schema",
+        ),
+    ],
+)
+def test_parse_responses_return(
+    responses_data: dict[str, dict[str, Any]],
+    expected_type_hints: dict[str, dict[str, str]],
+) -> None:
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+
+    result = parser.parse_responses(
+        "TestResponse",
+        {status_code: ResponseObject.parse_obj(response_data) for status_code, response_data in responses_data.items()},
+        ["test", "path"],
+    )
+
+    assert isinstance(result, dict)
+    assert len(result) == len(expected_type_hints)
+    for status_code, expected_content_types in expected_type_hints.items():
+        assert status_code in result
+        assert len(result[status_code]) == len(expected_content_types)
+        for content_type, expected_type_hint in expected_content_types.items():
+            assert content_type in result[status_code]
+            assert result[status_code][content_type].type_hint == expected_type_hint
diff -pruN 0.26.4-3/tests/root_id.json 0.34.0-1/tests/root_id.json
--- 0.26.4-3/tests/root_id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/root_id.json	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/test_format.py 0.34.0-1/tests/test_format.py
--- 0.26.4-3/tests/test_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_format.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,154 @@
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+from unittest import mock
+
+import pytest
+
+from datamodel_code_generator.format import CodeFormatter, Formatter, PythonVersion, PythonVersionMin
+
+# License-header fixture consumed (via custom_formatters_kwargs) by the
+# add_license formatter tests below.
+EXAMPLE_LICENSE_FILE = str(Path(__file__).parent / "data/python/custom_formatters/license_example.txt")
+
+# Dotted module paths of the custom-formatter test fixtures.
+UN_EXIST_FORMATTER = "tests.data.python.custom_formatters.un_exist"
+WRONG_FORMATTER = "tests.data.python.custom_formatters.wrong"
+NOT_SUBCLASS_FORMATTER = "tests.data.python.custom_formatters.not_subclass"
+ADD_COMMENT_FORMATTER = "tests.data.python.custom_formatters.add_comment"
+ADD_LICENSE_FORMATTER = "tests.data.python.custom_formatters.add_license"
+
+
+def test_python_version() -> None:
+    """Ensure that the python version used for the tests is properly listed"""
+
+    # Constructing PythonVersion fails if the interpreter's "major.minor"
+    # is not one of the supported versions.
+    _ = PythonVersion("{}.{}".format(*sys.version_info[:2]))
+
+
+@pytest.mark.parametrize(
+    ("skip_string_normalization", "expected_output"),
+    [
+        (True, "a = 'b'"),
+        (False, 'a = "b"'),
+    ],
+)
+def test_format_code_with_skip_string_normalization(
+    skip_string_normalization: bool,
+    expected_output: str,
+    tmp_path: Path,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Single quotes survive only when string normalization is skipped."""
+    # Run in an empty cwd — presumably so repo-level formatter config is not
+    # picked up; TODO confirm.
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(PythonVersionMin, skip_string_normalization=skip_string_normalization)
+
+    formatted_code = formatter.format_code("a = 'b'")
+
+    # The formatted snippet always gains a trailing newline.
+    assert formatted_code == expected_output + "\n"
+
+
+def test_format_code_un_exist_custom_formatter() -> None:
+    """A custom formatter module that cannot be imported raises ModuleNotFoundError."""
+    with pytest.raises(ModuleNotFoundError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[UN_EXIST_FORMATTER],
+        )
+
+
+def test_format_code_invalid_formatter_name() -> None:
+    """An importable module without a usable formatter raises NameError."""
+    # The "wrong" fixture imports fine but — presumably — does not expose the
+    # expected formatter class name; confirm against the fixture module.
+    with pytest.raises(NameError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[WRONG_FORMATTER],
+        )
+
+
+def test_format_code_is_not_subclass() -> None:
+    """A formatter that is not a subclass of the expected base raises TypeError."""
+    with pytest.raises(TypeError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[NOT_SUBCLASS_FORMATTER],
+        )
+
+
+def test_format_code_with_custom_formatter_without_kwargs(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """The add_comment formatter prepends its comment without extra configuration."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[ADD_COMMENT_FORMATTER],
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    assert formatted_code == "# a comment\nx = 1\ny = 2" + "\n"
+
+
+def test_format_code_with_custom_formatter_with_kwargs(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """custom_formatters_kwargs are forwarded to the formatter (license_file here)."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[ADD_LICENSE_FORMATTER],
+        custom_formatters_kwargs={"license_file": EXAMPLE_LICENSE_FILE},
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    # The license header comes from EXAMPLE_LICENSE_FILE.
+    assert (
+        formatted_code
+        == """# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+x = 1
+y = 2
+"""
+    )
+
+
+def test_format_code_with_two_custom_formatters(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Both formatters run; the license header ends up above the added comment."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[
+            ADD_COMMENT_FORMATTER,
+            ADD_LICENSE_FORMATTER,
+        ],
+        custom_formatters_kwargs={"license_file": EXAMPLE_LICENSE_FILE},
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    assert (
+        formatted_code
+        == """# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+# a comment
+x = 1
+y = 2
+"""
+    )
+
+
+def test_format_code_ruff_format_formatter() -> None:
+    """`ruff format` is invoked through stdin/stdout when selected as formatter."""
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        formatters=[Formatter.RUFF_FORMAT],
+    )
+    # subprocess.run is mocked: only the exact command line and piping are checked.
+    with mock.patch("subprocess.run") as mock_run:
+        mock_run.return_value.stdout = b"output"
+        formatted_code = formatter.format_code("input")
+
+    assert formatted_code == "output"
+    mock_run.assert_called_once_with(("ruff", "format", "-"), input=b"input", capture_output=True, check=False)
+
+
+def test_format_code_ruff_check_formatter() -> None:
+    """`ruff check --fix` is invoked through stdin/stdout when selected as formatter."""
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        formatters=[Formatter.RUFF_CHECK],
+    )
+    with mock.patch("subprocess.run") as mock_run:
+        mock_run.return_value.stdout = b"output"
+        formatted_code = formatter.format_code("input")
+
+    assert formatted_code == "output"
+    mock_run.assert_called_once_with(("ruff", "check", "--fix", "-"), input=b"input", capture_output=True, check=False)
diff -pruN 0.26.4-3/tests/test_imports.py 0.34.0-1/tests/test_imports.py
--- 0.26.4-3/tests/test_imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_imports.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from datamodel_code_generator.imports import Import, Imports
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+@pytest.mark.parametrize(
+    ("inputs", "value"),
+    [
+        ([(None, "foo")], "import foo"),
+        ([(".", "foo")], "from . import foo"),
+        ([("bar", "foo")], "from bar import foo"),
+        ([("bar", "foo"), ("bar", "baz")], "from bar import baz, foo"),
+        ([("bar", "foo"), ("rab", "oof")], "from bar import foo\nfrom rab import oof"),
+        ([("bar", "foo"), ("bar", "foo")], "from bar import foo"),
+        ([(None, "foo.baz")], "import foo.baz"),
+    ],
+)
+def test_dump(inputs: Sequence[tuple[str | None, str]], value: str) -> None:
+    """Test creating import lines."""
+
+    imports = Imports()
+    # NOTE(review): a generator expression (not a single Import) is passed here —
+    # relies on Imports.append accepting an iterable of Import; confirm in
+    # datamodel_code_generator.imports.
+    imports.append(Import(from_=from_, import_=import_) for from_, import_ in inputs)
+
+    assert str(imports) == value
diff -pruN 0.26.4-3/tests/test_infer_input_type.py 0.34.0-1/tests/test_infer_input_type.py
--- 0.26.4-3/tests/test_infer_input_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_infer_input_type.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from datamodel_code_generator import Error, InputFileType, infer_input_type
+
+# Root of the bundled test-data tree scanned below.
+DATA_PATH: Path = Path(__file__).parent / "data"
+
+
+def test_infer_input_type() -> None:
+    """infer_input_type classifies every bundled data file as the expected type."""
+    def assert_infer_input_type(file: Path, raw_data_type: InputFileType) -> None:
+        # Hide this helper frame from pytest tracebacks on failure.
+        __tracebackhide__ = True
+        if file.is_dir():
+            return
+        if file.suffix not in {".yaml", ".json"}:
+            return
+        result = infer_input_type(file.read_text(encoding="utf-8"))
+        assert result == raw_data_type, f"{file} was the wrong type!"
+
+    def assert_invalid_infer_input_type(file: Path) -> None:
+        # Inference must fail loudly, pointing the user at --input-file-type.
+        with pytest.raises(
+            Error,
+            match=(
+                r"Can't infer input file type from the input data. "
+                r"Please specify the input file type explicitly with --input-file-type option."
+            ),
+        ):
+            infer_input_type(file.read_text(encoding="utf-8"))
+
+    for file in (DATA_PATH / "csv").rglob("*"):
+        assert_infer_input_type(file, InputFileType.CSV)
+
+    # Names skipped below are fixtures that are intentionally invalid or —
+    # presumably — ambiguous between formats; confirm each exclusion is still needed.
+    for file in (DATA_PATH / "json").rglob("*"):
+        if file.name.endswith("broken.json"):
+            continue
+        assert_infer_input_type(file, InputFileType.Json)
+    for file in (DATA_PATH / "jsonschema").rglob("*"):
+        if file.name.endswith((
+            "external_child.json",
+            "external_child.yaml",
+            "extra_data_msgspec.json",
+        )):
+            continue
+        assert_infer_input_type(file, InputFileType.JsonSchema)
+    for file in (DATA_PATH / "openapi").rglob("*"):
+        if "all_of_with_relative_ref" in file.parts:
+            continue
+        if "reference_same_hierarchy_directory" in file.parts:
+            continue
+        if file.name.endswith((
+            "aliases.json",
+            "extra_data.json",
+            "extra_data_msgspec.json",
+            "invalid.yaml",
+            "list.json",
+            "empty_data.json",
+            "root_model.yaml",
+            "json_pointer.yaml",
+            "const.json",
+            "array_called_fields_with_oneOf_items.yaml",
+        )):
+            continue
+
+        # The "not.json" fixtures must be rejected rather than misclassified.
+        if file.name.endswith("not.json"):
+            assert_invalid_infer_input_type(file)
+            continue
+        assert_infer_input_type(file, InputFileType.OpenAPI)
diff -pruN 0.26.4-3/tests/test_main_kr.py 0.34.0-1/tests/test_main_kr.py
--- 0.26.4-3/tests/test_main_kr.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_main_kr.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,411 @@
+from __future__ import annotations
+
+import shutil
+from argparse import Namespace
+from pathlib import Path
+
+import black
+import pytest
+from freezegun import freeze_time
+
+from datamodel_code_generator import MIN_VERSION, chdir, inferred_message
+from datamodel_code_generator.__main__ import Exit, main
+
+DATA_PATH: Path = Path(__file__).parent / "data"
+OPEN_API_DATA_PATH: Path = DATA_PATH / "openapi"
+EXPECTED_MAIN_KR_PATH = DATA_PATH / "expected" / "main_kr"
+
+
+# Fixed timestamp used with freeze_time in the tests below.
+TIMESTAMP = "1985-10-26T01:21:00-07:00"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    """Give every test a fresh argparse namespace so CLI state cannot leak between tests."""
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@freeze_time("2019-07-26")
+def test_main(tmp_path: Path) -> None:
+    """Happy path: generate a single-file model from an OpenAPI document."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_KR_PATH / "main" / "output.py").read_text()
+
+
+@freeze_time("2019-07-26")
+def test_main_base_class(tmp_path: Path) -> None:
+    """--base-class makes generated models inherit from the given dotted path."""
+    output_file: Path = tmp_path / "output.py"
+    # NOTE(review): pyproject.toml is copied beside the output — presumably so
+    # config discovery picks it up; confirm against main()'s lookup rules.
+    shutil.copy(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--base-class",
+        "custom_module.Base",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_KR_PATH / "main_base_class" / "output.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_target_python_version(tmp_path: Path) -> None:
+    """--target-python-version at the minimum supported version generates the expected output."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+        "--target-python-version",
+        f"3.{MIN_VERSION}",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_MAIN_KR_PATH / "target_python_version" / "output.py").read_text()
+    )
+
+
+def test_main_modular(tmp_path: Path) -> None:
+    """Test main function on modular file."""
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_path = tmp_path / "model"
+
+    with freeze_time(TIMESTAMP):
+        main(["--input", str(input_filename), "--output", str(output_path)])
+    # Every generated module must match its expected counterpart byte-for-byte.
+    main_modular_dir = EXPECTED_MAIN_KR_PATH / "main_modular"
+    for path in main_modular_dir.rglob("*.py"):
+        result = output_path.joinpath(path.relative_to(main_modular_dir)).read_text()
+        assert result == path.read_text()
+
+
+def test_main_modular_no_file() -> None:
+    """Test main function on modular file with no output name."""
+
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+
+    # Modular output cannot go to stdout, so this is an error.
+    assert main(["--input", str(input_filename)]) == Exit.ERROR
+
+
+def test_main_modular_filename(tmp_path: Path) -> None:
+    """Test main function on modular file with filename."""
+
+    input_filename = OPEN_API_DATA_PATH / "modular.yaml"
+    output_filename = tmp_path / "model.py"
+
+    # Modular output needs a directory, not a single .py file.
+    assert main(["--input", str(input_filename), "--output", str(output_filename)]) == Exit.ERROR
+
+
+def test_main_no_file(capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test main function on non-modular file with no output name."""
+    # Docstring moved to the top of the function: placed after the first
+    # statement it was a discarded expression, not a docstring (PEP 257).
+    monkeypatch.chdir(tmp_path)
+
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+
+    with freeze_time(TIMESTAMP):
+        main(["--input", str(input_filename)])
+
+    # With no --output, generated code goes to stdout and the type-inference
+    # notice goes to stderr.
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_MAIN_KR_PATH / "main_no_file" / "output.py").read_text()
+
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+def test_main_custom_template_dir(
+    capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Test main function with custom template directory."""
+    # Docstring moved to the top of the function: placed after the first
+    # statement it was a discarded expression, not a docstring (PEP 257).
+    monkeypatch.chdir(tmp_path)
+
+    input_filename = OPEN_API_DATA_PATH / "api.yaml"
+    custom_template_dir = DATA_PATH / "templates"
+    extra_template_data = OPEN_API_DATA_PATH / "extra_data.json"
+
+    with freeze_time(TIMESTAMP):
+        main([
+            "--input",
+            str(input_filename),
+            "--custom-template-dir",
+            str(custom_template_dir),
+            "--extra-template-data",
+            str(extra_template_data),
+        ])
+
+    captured = capsys.readouterr()
+    assert captured.out == (EXPECTED_MAIN_KR_PATH / "main_custom_template_dir" / "output.py").read_text()
+    assert captured.err == inferred_message.format("openapi") + "\n"
+
+
+# Skip on black >= 24, which dropped the old formatting style. Compare the
+# major version numerically — the previous lexicographic string comparison
+# (`>= "24"`) only works while major versions happen to have the same number
+# of digits.
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_pyproject(tmp_path: Path) -> None:
+    """Configuration from a pyproject.toml next to the output is applied."""
+    pyproject_toml = DATA_PATH / "project" / "pyproject.toml"
+    shutil.copy(pyproject_toml, tmp_path)
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api.yaml"),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == (EXPECTED_MAIN_KR_PATH / "pyproject" / "output.py").read_text()
+
+
+@pytest.mark.parametrize("language", ["UK", "US"])
+def test_pyproject_respects_both_spellings_of_capitalize_enum_members_flag(language: str, tmp_path: Path) -> None:
+    """Both capitalise-/capitalize-enum-members spellings are accepted in pyproject.toml."""
+    pyproject_toml_data = f"""
+[tool.datamodel-codegen]
+capitali{"s" if language == "UK" else "z"}e-enum-members = true
+enable-version-header = false
+input-file-type = "jsonschema"
+"""
+    with (tmp_path / "pyproject.toml").open("w") as f:
+        f.write(pyproject_toml_data)
+
+    # Moved out of the `with` block above: building the schema text does not
+    # need the pyproject.toml file handle to stay open.
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "MyEnum": {
+      "enum": [
+        "MEMBER_1",
+        "member_2"
+      ]
+    }
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    with input_file.open("w") as f:
+        f.write(input_data)
+
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class MyEnum(Enum):
+    MEMBER_1 = 'MEMBER_1'
+    member_2 = 'member_2'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--disable-timestamp",
+        "--input",
+        input_file.as_posix(),
+        "--output",
+        output_file.as_posix(),
+    ])
+    assert return_code == Exit.OK
+    assert output_file.read_text(encoding="utf-8") == expected_output, (
+        f"\nExpected  output:\n{expected_output}\n\nGenerated output:\n{output_file.read_text(encoding='utf-8')}"
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    # NOTE(review): this reason text looks copy-pasted from the black>=24 skip
+    # above — here the problem is that black 19 is too old; confirm and reword.
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_pyproject_with_tool_section(tmp_path: Path) -> None:
+    """Test that a pyproject.toml with a [tool.datamodel-codegen] section is
+    found and its configuration applied.
+    """
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.10"
+strict-types = ["str"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+    output_file: Path = tmp_path / "output.py"
+
+    # Run main from within the output directory so we can find our
+    # pyproject.toml.
+    with chdir(tmp_path):
+        return_code: Exit = main([
+            "--input",
+            str((OPEN_API_DATA_PATH / "api.yaml").resolve()),
+            "--output",
+            str(output_file.resolve()),
+        ])
+
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        # We expect the output to use pydantic.StrictStr in place of str
+        == (EXPECTED_MAIN_KR_PATH / "pyproject" / "output.strictstr.py").read_text()
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_use_schema_description(tmp_path: Path) -> None:
+    """--use-schema-description emits schema descriptions (multiline here) into the output."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_multiline_docstrings.yaml"),
+        "--output",
+        str(output_file),
+        "--use-schema-description",
+    ])
+    assert return_code == Exit.OK
+    assert (
+        output_file.read_text(encoding="utf-8")
+        == (EXPECTED_MAIN_KR_PATH / "main_use_schema_description" / "output.py").read_text()
+    )
+
+
+@freeze_time("2022-11-11")
+def test_main_use_field_description(tmp_path: Path) -> None:
+    """--use-field-description emits field descriptions (multiline here) into the output."""
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--input",
+        str(OPEN_API_DATA_PATH / "api_multiline_docstrings.yaml"),
+        "--output",
+        str(output_file),
+        "--use-field-description",
+    ])
+    assert return_code == Exit.OK
+    generated = output_file.read_text(encoding="utf-8")
+    expected = (EXPECTED_MAIN_KR_PATH / "main_use_field_description" / "output.py").read_text()
+    assert generated == expected
+
+
+def test_capitalise_enum_members(tmp_path: Path) -> None:
+    """capitalise-enum-members not working since v0.28.5
+
+    From https://github.com/koxudaxi/datamodel-code-generator/issues/2370
+    """
+    # Reproduction input copied from the issue (note the schema says
+    # `type: enum`, verbatim from the report).
+    input_data = """
+openapi: 3.0.3
+info:
+  version: X.Y.Z
+  title: example schema
+servers:
+  - url: "https://acme.org"
+paths: {}
+components:
+  schemas:
+    EnumSystems:
+      type: enum
+      enum:
+        - linux
+        - osx
+        - windows
+"""
+    input_file = tmp_path / "myschema.yaml"
+    input_file.write_text(input_data, encoding="utf_8")
+
+    # Regression guard: --capitalise-enum-members combined with
+    # --snake-case-field must still upper-case the member names.
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  myschema.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class EnumSystems(Enum):
+    LINUX = 'linux'
+    OSX = 'osx'
+    WINDOWS = 'windows'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--disable-timestamp",
+        "--capitalise-enum-members",
+        "--snake-case-field",
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    output_file_read_text = output_file.read_text(encoding="utf_8")
+    assert output_file_read_text == expected_output, (
+        f"\nExpected  output:\n{expected_output}\n\nGenerated output:\n{output_file_read_text}"
+    )
+
+
+def test_capitalise_enum_members_and_use_subclass_enum(tmp_path: Path) -> None:
+    """Combination of capitalise-enum-members and use-subclass-enum not working since v0.28.5
+
+    From https://github.com/koxudaxi/datamodel-code-generator/issues/2395
+    """
+    input_data = """
+openapi: 3.0.3
+info:
+  version: X.Y.Z
+  title: example schema
+servers:
+  - url: "https://acme.org"
+paths: {}
+components:
+  schemas:
+    EnumSystems:
+      type: string
+      enum:
+        - linux
+        - osx
+        - windows
+"""
+    input_file = tmp_path / "myschema.yaml"
+    input_file.write_text(input_data, encoding="utf_8")
+
+    # --use-subclass-enum on a string-typed schema yields `(str, Enum)` bases,
+    # and the members must still be capitalised.
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  myschema.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class EnumSystems(str, Enum):
+    LINUX = 'linux'
+    OSX = 'osx'
+    WINDOWS = 'windows'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    return_code: Exit = main([
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--disable-timestamp",
+        "--capitalise-enum-members",
+        "--snake-case-field",
+        "--use-subclass-enum",
+        "--input",
+        str(input_file),
+        "--output",
+        str(output_file),
+    ])
+    assert return_code == Exit.OK
+    output_file_read_text = output_file.read_text(encoding="utf_8")
+    assert output_file_read_text == expected_output, (
+        f"\nExpected  output:\n{expected_output}\n\nGenerated output:\n{output_file_read_text}"
+    )
diff -pruN 0.26.4-3/tests/test_reference.py 0.34.0-1/tests/test_reference.py
--- 0.26.4-3/tests/test_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_reference.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+from pathlib import PurePosixPath, PureWindowsPath
+
+import pytest
+
+from datamodel_code_generator.reference import ModelResolver, get_relative_path
+
+
+@pytest.mark.parametrize(
+    ("base_path", "target_path", "expected"),
+    [
+        ("/a/b", "/a/b", "."),
+        ("/a/b", "/a/b/c", "c"),
+        ("/a/b", "/a/b/c/d", "c/d"),
+        ("/a/b/c", "/a/b", ".."),
+        ("/a/b/c/d", "/a/b", "../.."),
+        ("/a/b/c/d", "/a", "../../.."),
+        ("/a/b/c/d", "/a/x/y/z", "../../../x/y/z"),
+        # A relative target path is returned unchanged.
+        ("/a/b/c/d", "a/x/y/z", "a/x/y/z"),
+        ("/a/b/c/d", "/a/b/e/d", "../../e/d"),
+    ],
+)
+def test_get_relative_path_posix(base_path: str, target_path: str, expected: str) -> None:
+    """get_relative_path computes POSIX relative paths, including '..' ascents."""
+    assert PurePosixPath(get_relative_path(PurePosixPath(base_path), PurePosixPath(target_path))) == PurePosixPath(
+        expected
+    )
+
+
+@pytest.mark.parametrize(
+    ("base_path", "target_path", "expected"),
+    [
+        ("c:/a/b", "c:/a/b", "."),
+        ("c:/a/b", "c:/a/b/c", "c"),
+        ("c:/a/b", "c:/a/b/c/d", "c/d"),
+        ("c:/a/b/c", "c:/a/b", ".."),
+        ("c:/a/b/c/d", "c:/a/b", "../.."),
+        ("c:/a/b/c/d", "c:/a", "../../.."),
+        ("c:/a/b/c/d", "c:/a/x/y/z", "../../../x/y/z"),
+        # A relative target path is returned unchanged.
+        ("c:/a/b/c/d", "a/x/y/z", "a/x/y/z"),
+        ("c:/a/b/c/d", "c:/a/b/e/d", "../../e/d"),
+    ],
+)
+def test_get_relative_path_windows(base_path: str, target_path: str, expected: str) -> None:
+    """Same cases as the POSIX test, using drive-letter Windows paths."""
+    assert PureWindowsPath(
+        get_relative_path(PureWindowsPath(base_path), PureWindowsPath(target_path))
+    ) == PureWindowsPath(expected)
+
+
+def test_model_resolver_add_ref_with_hash() -> None:
+    """A trailing '#' fragment is ignored when deriving the reference name."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("https://json-schema.org/draft/2020-12/meta/core#")
+    assert reference.original_name == "core"
+
+
+def test_model_resolver_add_ref_without_hash() -> None:
+    """The last path segment becomes the reference name for plain refs."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("meta/core")
+    assert reference.original_name == "core"
+
+
+def test_model_resolver_add_ref_unevaluated() -> None:
+    """Same behaviour for the 'unevaluated' meta-schema segment."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("meta/unevaluated")
+    assert reference.original_name == "unevaluated"
diff -pruN 0.26.4-3/tests/test_resolver.py 0.34.0-1/tests/test_resolver.py
--- 0.26.4-3/tests/test_resolver.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_resolver.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.reference import FieldNameResolver
+
+
+@pytest.mark.parametrize(
+    ("name", "expected_resolved"),
+    [
+        ("3a", "field_3a"),
+        ("$in", "field_in"),
+        ("field", "field"),
+    ],
+)
+def test_get_valid_field_name(name: str, expected_resolved: str) -> None:
+    """Invalid identifiers (leading digit, '$') gain a 'field_' prefix; valid names pass through."""
+    resolver = FieldNameResolver()
+    assert expected_resolved == resolver.get_valid_name(name)
diff -pruN 0.26.4-3/tests/test_types.py 0.34.0-1/tests/test_types.py
--- 0.26.4-3/tests/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.34.0-1/tests/test_types.py	2025-09-30 07:37:47.000000000 +0000
@@ -0,0 +1,131 @@
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.types import _remove_none_from_union, get_optional_type
+
+
+@pytest.mark.parametrize(
+    ("input_", "use_union_operator", "expected"),
+    [
+        ("List[str]", False, "Optional[List[str]]"),
+        ("List[str, int, float]", False, "Optional[List[str, int, float]]"),
+        ("List[str, int, None]", False, "Optional[List[str, int, None]]"),
+        # A single-member Union collapses directly into Optional[...].
+        ("Union[str]", False, "Optional[str]"),
+        ("Union[str, int, float]", False, "Optional[Union[str, int, float]]"),
+        # Existing None members of a top-level Union are deduplicated.
+        ("Union[str, int, None]", False, "Optional[Union[str, int]]"),
+        ("Union[str, int, None, None]", False, "Optional[Union[str, int]]"),
+        (
+            "Union[str, int, List[str, int, None], None]",
+            False,
+            "Optional[Union[str, int, List[str, int, None]]]",
+        ),
+        (
+            "Union[str, int, List[str, Dict[int, str | None]], None]",
+            False,
+            "Optional[Union[str, int, List[str, Dict[int, str | None]]]]",
+        ),
+        ("List[str]", True, "List[str] | None"),
+        ("List[str | int | float]", True, "List[str | int | float] | None"),
+        ("List[str | int | None]", True, "List[str | int | None] | None"),
+        ("str", True, "str | None"),
+        ("str | int | float", True, "str | int | float | None"),
+        ("str | int | None", True, "str | int | None"),
+        ("str | int | None | None", True, "str | int | None"),
+        (
+            "str | int | List[str | Dict[int | Union[str | None]]] | None",
+            True,
+            "str | int | List[str | Dict[int | Union[str | None]]] | None",
+        ),
+    ],
+)
+def test_get_optional_type(input_: str, use_union_operator: bool, expected: str) -> None:
+    """get_optional_type wraps a type-hint string in Optional[...] or appends `| None`."""
+    assert get_optional_type(input_, use_union_operator) == expected
+
+
+@pytest.mark.parametrize(
+    ("type_str", "use_union_operator", "expected"),
+    [
+        # Traditional Union syntax
+        ("Union[str, None]", False, "str"),
+        ("Union[str, int, None]", False, "Union[str, int]"),
+        ("Union[None, str]", False, "str"),
+        ("Union[None]", False, "None"),
+        ("Union[None, None]", False, "None"),
+        ("Union[Union[str, None], int]", False, "Union[str, int]"),
+        # Union for constraint strings with pattern or regex
+        (
+            "Union[constr(pattern=r'^a,b$'), None]",
+            False,
+            "constr(pattern=r'^a,b$')",
+        ),
+        (
+            "Union[constr(regex=r'^a,b$'), None]",
+            False,
+            "constr(regex=r'^a,b$')",
+        ),
+        (
+            "Union[constr(pattern=r'^\\d+,\\w+$'), None]",
+            False,
+            "constr(pattern=r'^\\d+,\\w+$')",
+        ),
+        (
+            "Union[constr(regex=r'^\\d+,\\w+$'), None]",
+            False,
+            "constr(regex=r'^\\d+,\\w+$')",
+        ),
+        # Union operator syntax
+        ("str | None", True, "str"),
+        ("int | str | None", True, "int | str"),
+        ("None | str", True, "str"),
+        ("None | None", True, "None"),
+        # A '|' inside a constr() pattern must not be treated as a union separator.
+        ("constr(pattern='0|1') | None", True, "constr(pattern='0|1')"),
+        ("constr(pattern='0  |1') | int | None", True, "constr(pattern='0  |1') | int"),
+        # Complex nested types - traditional syntax
+        ("Union[str, int] | None", True, "Union[str, int]"),
+        (
+            "Optional[List[Dict[str, Any]]] | None",
+            True,
+            "Optional[List[Dict[str, Any]]]",
+        ),
+        # Union for constraint strings with pattern or regex on nested types
+        (
+            "Union[constr(pattern=r'\\['), Union[str, None], int]",
+            False,
+            "Union[constr(pattern=r'\\['), str, int]",
+        ),
+        (
+            "Union[constr(regex=r'\\['), Union[str, None], int]",
+            False,
+            "Union[constr(regex=r'\\['), str, int]",
+        ),
+        # Complex nested types - union operator syntax
+        ("List[str | None] | None", True, "List[str | None]"),
+        (
+            "List[constr(pattern='0|1') | None] | None",
+            True,
+            "List[constr(pattern='0|1') | None]",
+        ),
+        (
+            "List[constr(pattern='0 | 1') | None] | None",
+            True,
+            "List[constr(pattern='0 | 1') | None]",
+        ),
+        (
+            "List[constr(pattern='0  | 1') | None] | None",
+            True,
+            "List[constr(pattern='0  | 1') | None]",
+        ),
+        ("Dict[str, int] | None | List[str]", True, "Dict[str, int] | List[str]"),
+        # Edge cases that test the fixed regex pattern issue
+        ("List[str] | None", True, "List[str]"),
+        ("Dict[str, int] | None", True, "Dict[str, int]"),
+        ("Tuple[int, ...] | None", True, "Tuple[int, ...]"),
+        ("Callable[[int], str] | None", True, "Callable[[int], str]"),
+        # Non-union types (should be returned as-is)
+        ("str", False, "str"),
+        ("List[str]", False, "List[str]"),
+    ],
+)
+def test_remove_none_from_union(type_str: str, use_union_operator: bool, expected: str) -> None:
+    """_remove_none_from_union strips None members from a union type-hint string."""
+    assert _remove_none_from_union(type_str, use_union_operator=use_union_operator) == expected
