diff --git a/libs/core/langchain_core/_api/deprecation.py b/libs/core/langchain_core/_api/deprecation.py
index fcbfc58b38fc0..355d9d227412d 100644
--- a/libs/core/langchain_core/_api/deprecation.py
+++ b/libs/core/langchain_core/_api/deprecation.py
@@ -23,6 +23,8 @@
     cast,
 )
 
+from pydantic.fields import FieldInfo
+from pydantic.v1.fields import FieldInfo as FieldInfoV1
 from typing_extensions import ParamSpec
 
 from langchain_core._api.internal import is_caller_internal
@@ -152,10 +154,6 @@ def deprecate(
         _package: str = package,
     ) -> T:
         """Implementation of the decorator returned by `deprecated`."""
-        from langchain_core.utils.pydantic import (  # type: ignore[attr-defined]
-            FieldInfoV1,
-            FieldInfoV2,
-        )
 
         def emit_warning() -> None:
             """Emit the warning."""
@@ -249,7 +247,7 @@ def finalize(wrapper: Callable[..., Any], new_doc: str) -> T:  # noqa: ARG001
                 ),
             )
 
-    elif isinstance(obj, FieldInfoV2):
+    elif isinstance(obj, FieldInfo):
         wrapped = None
         if not _obj_type:
             _obj_type = "attribute"
@@ -261,7 +259,7 @@ def finalize(wrapper: Callable[..., Any], new_doc: str) -> T:  # noqa: ARG001
             return cast(
                 "T",
-                FieldInfoV2(
+                FieldInfo(
                     default=obj.default,
                     default_factory=obj.default_factory,
                     description=new_doc,
diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index 93fdc325898cf..524c5d72b5aaa 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -621,7 +621,7 @@ def _get_invocation_params(
         stop: Optional[list[str]] = None,
         **kwargs: Any,
     ) -> dict:
-        params = self.dict()
+        params = self.model_dump()
         params["stop"] = stop
         return {**params, **kwargs}
 
@@ -1288,7 +1288,7 @@ def _llm_type(self) -> str:
         """Return type of chat model."""
 
     @override
-    def dict(self, **kwargs: Any) -> dict:
+    def model_dump(self, **kwargs: Any) -> dict:
         """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type
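
The chat-model hunks above rename the Pydantic v1 serialization hook dict() to the v2 name model_dump(), and _get_invocation_params now builds its parameter dict from model_dump(). A minimal sketch of that pattern, assuming Pydantic 2 is installed; ToyChatModel and its "_type" key are illustrative and not taken from this diff:

    from typing import Any

    from pydantic import BaseModel


    class ToyChatModel(BaseModel):
        temperature: float = 0.7

        # Same idea as the hunks above: override the v2 hook and layer
        # identifying keys on top of the regular field dump.
        def model_dump(self, **kwargs: Any) -> dict:
            starter_dict = super().model_dump(**kwargs)
            starter_dict["_type"] = "toy-chat-model"
            return starter_dict


    params = {**ToyChatModel().model_dump(), "stop": None}
    print(params)  # {'temperature': 0.7, '_type': 'toy-chat-model', 'stop': None}
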
diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py
index 87f3974ae1ce6..8782268cb743c 100644
--- a/libs/core/langchain_core/language_models/llms.py
+++ b/libs/core/langchain_core/language_models/llms.py
@@ -528,7 +528,7 @@ def stream(
         else:
             prompt = self._convert_input(input).to_string()
             config = ensure_config(config)
-            params = self.dict()
+            params = self.model_dump()
             params["stop"] = stop
             params = {**params, **kwargs}
             options = {"stop": stop}
@@ -598,7 +598,7 @@ async def astream(
 
             prompt = self._convert_input(input).to_string()
             config = ensure_config(config)
-            params = self.dict()
+            params = self.model_dump()
             params["stop"] = stop
             params = {**params, **kwargs}
             options = {"stop": stop}
@@ -941,7 +941,7 @@ def generate(
         ] * len(prompts)
         run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
         run_ids_list = self._get_run_ids_list(run_id, prompts)
-        params = self.dict()
+        params = self.model_dump()
         params["stop"] = stop
         options = {"stop": stop}
         (
@@ -1193,7 +1193,7 @@ async def agenerate(
         ] * len(prompts)
         run_name_list = [cast("Optional[str]", run_name)] * len(prompts)
         run_ids_list = self._get_run_ids_list(run_id, prompts)
-        params = self.dict()
+        params = self.model_dump()
         params["stop"] = stop
         options = {"stop": stop}
         (
@@ -1400,7 +1400,7 @@ def _llm_type(self) -> str:
         """Return type of llm."""
 
     @override
-    def dict(self, **kwargs: Any) -> dict:
+    def model_dump(self, **kwargs: Any) -> dict:
         """Return a dictionary of the LLM."""
         starter_dict = dict(self._identifying_params)
         starter_dict["_type"] = self._llm_type
@@ -1427,7 +1427,7 @@ def save(self, file_path: Union[Path, str]) -> None:
         directory_path.mkdir(parents=True, exist_ok=True)
 
         # Fetch dictionary to save
-        prompt_dict = self.dict()
+        prompt_dict = self.model_dump()
 
         if save_path.suffix == ".json":
             with save_path.open("w") as f:
diff --git a/libs/core/langchain_core/output_parsers/base.py b/libs/core/langchain_core/output_parsers/base.py
index a14a10829f99e..038b43d626778 100644
--- a/libs/core/langchain_core/output_parsers/base.py
+++ b/libs/core/langchain_core/output_parsers/base.py
@@ -324,9 +324,9 @@ def _type(self) -> str:
         )
         raise NotImplementedError(msg)
 
-    def dict(self, **kwargs: Any) -> dict:
+    def model_dump(self, **kwargs: Any) -> dict:
         """Return dictionary representation of output parser."""
-        output_parser_dict = super().dict(**kwargs)
+        output_parser_dict = super().model_dump(**kwargs)
         with contextlib.suppress(NotImplementedError):
             output_parser_dict["_type"] = self._type
         return output_parser_dict
diff --git a/libs/core/langchain_core/output_parsers/json.py b/libs/core/langchain_core/output_parsers/json.py
index 84f510f331ddd..8717b35e4a4df 100644
--- a/libs/core/langchain_core/output_parsers/json.py
+++ b/libs/core/langchain_core/output_parsers/json.py
@@ -9,6 +9,7 @@
 import jsonpatch  # type: ignore[import-untyped]
 import pydantic
 from pydantic import SkipValidation
+from pydantic.v1 import BaseModel
 
 from langchain_core.exceptions import OutputParserException
 from langchain_core.output_parsers.format_instructions import JSON_FORMAT_INSTRUCTIONS
@@ -19,16 +20,9 @@
     parse_json_markdown,
     parse_partial_json,
 )
-from langchain_core.utils.pydantic import IS_PYDANTIC_V1
-
-if IS_PYDANTIC_V1:
-    PydanticBaseModel = pydantic.BaseModel
-
-else:
-    from pydantic.v1 import BaseModel
-
-    # Union type needs to be last assignment to PydanticBaseModel to make mypy happy.
-    PydanticBaseModel = Union[BaseModel, pydantic.BaseModel]  # type: ignore[assignment,misc]
+
+# Union type needs to be last assignment to PydanticBaseModel to make mypy happy.
+PydanticBaseModel = Union[BaseModel, pydantic.BaseModel]
 
 TBaseModel = TypeVar("TBaseModel", bound=PydanticBaseModel)
diff --git a/libs/core/langchain_core/output_parsers/openai_functions.py b/libs/core/langchain_core/output_parsers/openai_functions.py
index f71b2c8044bcd..feab3707997fc 100644
--- a/libs/core/langchain_core/output_parsers/openai_functions.py
+++ b/libs/core/langchain_core/output_parsers/openai_functions.py
@@ -7,6 +7,7 @@
 import jsonpatch  # type: ignore[import-untyped]
 from pydantic import BaseModel, model_validator
+from pydantic.v1 import BaseModel as BaseModelV1
 from typing_extensions import override
 
 from langchain_core.exceptions import OutputParserException
@@ -274,10 +275,13 @@ def parse_result(self, result: list[Generation], *, partial: bool = False) -> An
             pydantic_schema = self.pydantic_schema[fn_name]
         else:
             pydantic_schema = self.pydantic_schema
-        if hasattr(pydantic_schema, "model_validate_json"):
+        if issubclass(pydantic_schema, BaseModel):
             pydantic_args = pydantic_schema.model_validate_json(_args)
-        else:
+        elif issubclass(pydantic_schema, BaseModelV1):
             pydantic_args = pydantic_schema.parse_raw(_args)
+        else:
+            msg = f"Unsupported pydantic schema: {pydantic_schema}"
+            raise ValueError(msg)
         return pydantic_args
diff --git a/libs/core/langchain_core/output_parsers/pydantic.py b/libs/core/langchain_core/output_parsers/pydantic.py
index 194cc3a2d364e..b543df55768ba 100644
--- a/libs/core/langchain_core/output_parsers/pydantic.py
+++ b/libs/core/langchain_core/output_parsers/pydantic.py
@@ -11,7 +11,6 @@
 from langchain_core.output_parsers import JsonOutputParser
 from langchain_core.outputs import Generation
 from langchain_core.utils.pydantic import (
-    IS_PYDANTIC_V2,
     PydanticBaseModel,
     TBaseModel,
 )
@@ -24,22 +23,16 @@ class PydanticOutputParser(JsonOutputParser, Generic[TBaseModel]):
     """The pydantic model to parse."""
 
     def _parse_obj(self, obj: dict) -> TBaseModel:
-        if IS_PYDANTIC_V2:
-            try:
-                if issubclass(self.pydantic_object, pydantic.BaseModel):
-                    return self.pydantic_object.model_validate(obj)
-                if issubclass(self.pydantic_object, pydantic.v1.BaseModel):
-                    return self.pydantic_object.parse_obj(obj)
-                msg = f"Unsupported model version for PydanticOutputParser: \
-                    {self.pydantic_object.__class__}"
-                raise OutputParserException(msg)
-            except (pydantic.ValidationError, pydantic.v1.ValidationError) as e:
-                raise self._parser_exception(e, obj) from e
-        else:  # pydantic v1
-            try:
+        try:
+            if issubclass(self.pydantic_object, pydantic.BaseModel):
+                return self.pydantic_object.model_validate(obj)
+            if issubclass(self.pydantic_object, pydantic.v1.BaseModel):
                 return self.pydantic_object.parse_obj(obj)
-            except pydantic.ValidationError as e:
-                raise self._parser_exception(e, obj) from e
+            msg = f"Unsupported model version for PydanticOutputParser: \
+                {self.pydantic_object.__class__}"
+            raise OutputParserException(msg)
+        except (pydantic.ValidationError, pydantic.v1.ValidationError) as e:
+            raise self._parser_exception(e, obj) from e
 
     def _parser_exception(
         self, e: Exception, json_object: dict
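
The parser hunks above replace hasattr probing and IS_PYDANTIC_V2 branching with issubclass checks against the two model flavors that coexist under Pydantic 2: pydantic.BaseModel (the v2 API) and pydantic.v1.BaseModel (the bundled v1 API). A hedged sketch of that dispatch, assuming Pydantic 2 is installed; validate_either and the toy models are illustrative names, not part of this diff:

    import pydantic
    import pydantic.v1


    def validate_either(schema: type, payload: dict):
        # v2 models validate with model_validate, v1-style models with parse_obj.
        if issubclass(schema, pydantic.BaseModel):
            return schema.model_validate(payload)
        if issubclass(schema, pydantic.v1.BaseModel):
            return schema.parse_obj(payload)
        raise TypeError(f"Unsupported schema: {schema}")


    class V2Point(pydantic.BaseModel):
        x: int


    class V1Point(pydantic.v1.BaseModel):
        x: int


    print(validate_either(V2Point, {"x": 1}), validate_either(V1Point, {"x": 1}))

The two base classes are unrelated hierarchies, so the order of the checks does not change the result.
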
diff --git a/libs/core/langchain_core/output_parsers/transform.py b/libs/core/langchain_core/output_parsers/transform.py
index 783abedf116ff..42949bdaf55a8 100644
--- a/libs/core/langchain_core/output_parsers/transform.py
+++ b/libs/core/langchain_core/output_parsers/transform.py
@@ -125,7 +125,7 @@ def _transform(self, input: Iterator[Union[str, BaseMessage]]) -> Iterator[Any]:
                 chunk_gen = ChatGenerationChunk(message=chunk)
             elif isinstance(chunk, BaseMessage):
                 chunk_gen = ChatGenerationChunk(
-                    message=BaseMessageChunk(**chunk.dict())
+                    message=BaseMessageChunk(**chunk.model_dump())
                 )
             else:
                 chunk_gen = GenerationChunk(text=chunk)
@@ -151,7 +151,7 @@ async def _atransform(
                 chunk_gen = ChatGenerationChunk(message=chunk)
             elif isinstance(chunk, BaseMessage):
                 chunk_gen = ChatGenerationChunk(
-                    message=BaseMessageChunk(**chunk.dict())
+                    message=BaseMessageChunk(**chunk.model_dump())
                 )
             else:
                 chunk_gen = GenerationChunk(text=chunk)
diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py
index d880971750f19..02808488f6947 100644
--- a/libs/core/langchain_core/prompts/base.py
+++ b/libs/core/langchain_core/prompts/base.py
@@ -331,7 +331,7 @@ def _prompt_type(self) -> str:
         """Return the prompt type key."""
         raise NotImplementedError
 
-    def dict(self, **kwargs: Any) -> dict:
+    def model_dump(self, **kwargs: Any) -> dict:
         """Return dictionary representation of prompt.
 
         Args:
@@ -369,7 +369,7 @@ def save(self, file_path: Union[Path, str]) -> None:
             raise ValueError(msg)
 
         # Fetch dictionary to save
-        prompt_dict = self.dict()
+        prompt_dict = self.model_dump()
         if "_type" not in prompt_dict:
             msg = f"Prompt {self} does not support saving."
             raise NotImplementedError(msg)
diff --git a/libs/core/langchain_core/pydantic_v1/__init__.py b/libs/core/langchain_core/pydantic_v1/__init__.py
index e4dfbbaea1916..1f7c9cb86991f 100644
--- a/libs/core/langchain_core/pydantic_v1/__init__.py
+++ b/libs/core/langchain_core/pydantic_v1/__init__.py
@@ -2,24 +2,9 @@
 
 from importlib import metadata
 
-from langchain_core._api.deprecation import warn_deprecated
-
-# Create namespaces for pydantic v1 and v2.
-# This code must stay at the top of the file before other modules may
-# attempt to import pydantic since it adds pydantic_v1 and pydantic_v2 to sys.modules.
-#
-# This hack is done for the following reasons:
-# * Langchain will attempt to remain compatible with both pydantic v1 and v2 since
-#   both dependencies and dependents may be stuck on either version of v1 or v2.
-# * Creating namespaces for pydantic v1 and v2 should allow us to write code that
-#   unambiguously uses either v1 or v2 API.
-# * This change is easier to roll out and roll back.
-
-try:
-    from pydantic.v1 import *  # noqa: F403
-except ImportError:
-    from pydantic import *  # type: ignore[assignment,no-redef]  # noqa: F403
+from pydantic.v1 import *  # noqa: F403
+
+from langchain_core._api.deprecation import warn_deprecated
 
 try:
     _PYDANTIC_MAJOR_VERSION: int = int(metadata.version("pydantic").split(".")[0])
diff --git a/libs/core/langchain_core/pydantic_v1/dataclasses.py b/libs/core/langchain_core/pydantic_v1/dataclasses.py
index 812660578bd33..cdcdb77e3a0b2 100644
--- a/libs/core/langchain_core/pydantic_v1/dataclasses.py
+++ b/libs/core/langchain_core/pydantic_v1/dataclasses.py
@@ -1,11 +1,8 @@
 """Pydantic v1 compatibility shim."""
 
-from langchain_core._api import warn_deprecated
+from pydantic.v1.dataclasses import *  # noqa: F403
 
-try:
-    from pydantic.v1.dataclasses import *  # noqa: F403
-except ImportError:
-    from pydantic.dataclasses import *  # type: ignore[no-redef]  # noqa: F403
+from langchain_core._api import warn_deprecated
 
 warn_deprecated(
     "0.3.0",
diff --git a/libs/core/langchain_core/pydantic_v1/main.py b/libs/core/langchain_core/pydantic_v1/main.py
index 88492823173d3..005ad4ed347cd 100644
--- a/libs/core/langchain_core/pydantic_v1/main.py
+++ b/libs/core/langchain_core/pydantic_v1/main.py
@@ -1,11 +1,8 @@
 """Pydantic v1 compatibility shim."""
 
-from langchain_core._api import warn_deprecated
+from pydantic.v1.main import *  # noqa: F403
 
-try:
-    from pydantic.v1.main import *  # noqa: F403
-except ImportError:
-    from pydantic.main import *  # type: ignore[assignment,no-redef]  # noqa: F403
+from langchain_core._api import warn_deprecated
 
 warn_deprecated(
     "0.3.0",
diff --git a/libs/core/langchain_core/tools/base.py b/libs/core/langchain_core/tools/base.py
index daee86043071d..710646f52ea17 100644
--- a/libs/core/langchain_core/tools/base.py
+++ b/libs/core/langchain_core/tools/base.py
@@ -543,10 +543,13 @@ def _parse_input(
                 )
                 raise ValueError(msg)
             key_ = next(iter(get_fields(input_args).keys()))
-            if hasattr(input_args, "model_validate"):
+            if issubclass(input_args, BaseModel):
                 input_args.model_validate({key_: tool_input})
-            else:
+            elif issubclass(input_args, BaseModelV1):
                 input_args.parse_obj({key_: tool_input})
+            else:
+                msg = f"args_schema must be a Pydantic BaseModel, got {input_args}"
+                raise TypeError(msg)
             return tool_input
         if input_args is not None:
             if isinstance(input_args, dict):
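
The pydantic_v1 shim modules above drop their ImportError fallbacks because, with Pydantic 2 as the only supported major version, the legacy API is always importable from the pydantic.v1 namespace. A small illustration of that assumption (not part of the diff); it only holds when a 2.x release of Pydantic is installed:

    import pydantic
    from pydantic.v1 import BaseModel as BaseModelV1


    class LegacyThing(BaseModelV1):
        x: int


    print(pydantic.VERSION)                             # a 2.x release
    print(LegacyThing(x=1).dict())                      # v1-style API still works
    print(issubclass(LegacyThing, pydantic.BaseModel))  # False: separate hierarchies
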
diff --git a/libs/core/langchain_core/tracers/schemas.py b/libs/core/langchain_core/tracers/schemas.py
index e7ebad1da1e90..ea06cb2f9dcb5 100644
--- a/libs/core/langchain_core/tracers/schemas.py
+++ b/libs/core/langchain_core/tracers/schemas.py
@@ -2,8 +2,8 @@
 
 from __future__ import annotations
 
-import datetime
 import warnings
+from datetime import datetime, timezone
 from typing import Any, Optional
 from uuid import UUID
 
@@ -32,7 +32,7 @@ def RunTypeEnum() -> type[RunTypeEnumDep]:  # noqa: N802
 class TracerSessionV1Base(BaseModelV1):
     """Base class for TracerSessionV1."""
 
-    start_time: datetime.datetime = FieldV1(default_factory=datetime.datetime.utcnow)
+    start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
     name: Optional[str] = None
     extra: Optional[dict[str, Any]] = None
 
@@ -69,8 +69,8 @@ class BaseRun(BaseModelV1):
 
     uuid: str
     parent_uuid: Optional[str] = None
-    start_time: datetime.datetime = FieldV1(default_factory=datetime.datetime.utcnow)
-    end_time: datetime.datetime = FieldV1(default_factory=datetime.datetime.utcnow)
+    start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
+    end_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
     extra: Optional[dict[str, Any]] = None
     execution_order: int
     child_execution_order: int
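
In the tracer schemas above, the default timestamps switch from datetime.utcnow, which returns a naive datetime, to a timezone-aware datetime.now(timezone.utc). A minimal sketch of the difference, using an illustrative model rather than the ones in the diff:

    from datetime import datetime, timezone

    from pydantic.v1 import BaseModel, Field


    class ToyRun(BaseModel):
        start_time: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))


    print(ToyRun().start_time.tzinfo)  # timezone.utc
    print(datetime.utcnow().tzinfo)    # None: the naive value the change avoids
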
diff --git a/libs/core/langchain_core/utils/pydantic.py b/libs/core/langchain_core/utils/pydantic.py
index ea987741ce3b6..09697c236b7e6 100644
--- a/libs/core/langchain_core/utils/pydantic.py
+++ b/libs/core/langchain_core/utils/pydantic.py
@@ -21,9 +21,12 @@
 import pydantic
 from packaging import version
-from pydantic import (
+
+# root_validator is deprecated but we need it for backward compatibility of @pre_init
+from pydantic import (  # type: ignore[deprecated]
     BaseModel,
     ConfigDict,
+    Field,
     PydanticDeprecationWarning,
     RootModel,
     root_validator,
@@ -38,29 +41,23 @@
     JsonSchemaMode,
     JsonSchemaValue,
 )
-from typing_extensions import override
+from pydantic.v1 import BaseModel as BaseModelV1
+from pydantic.v1 import create_model as create_model_v1
+from pydantic.v1.fields import FieldInfo as FieldInfoV1
+from typing_extensions import deprecated, override
 
 if TYPE_CHECKING:
     from pydantic_core import core_schema
 
-try:
-    import pydantic
-
-    PYDANTIC_VERSION = version.parse(pydantic.__version__)
-except ImportError:
-    PYDANTIC_VERSION = version.parse("0.0.0")
+PYDANTIC_VERSION = version.parse(pydantic.__version__)
 
 
+@deprecated("Use PYDANTIC_VERSION.major instead.")
 def get_pydantic_major_version() -> int:
     """DEPRECATED - Get the major version of Pydantic.
 
     Use PYDANTIC_VERSION.major instead.
     """
-    warnings.warn(
-        "get_pydantic_major_version is deprecated. Use PYDANTIC_VERSION.major instead.",
-        DeprecationWarning,
-        stacklevel=2,
-    )
     return PYDANTIC_VERSION.major
@@ -70,42 +67,20 @@ def get_pydantic_major_version() -> int:
 
 IS_PYDANTIC_V1 = PYDANTIC_VERSION.major == 1
 IS_PYDANTIC_V2 = PYDANTIC_VERSION.major == 2
 
-if IS_PYDANTIC_V1:
-    from pydantic.fields import FieldInfo as FieldInfoV1
-
-    PydanticBaseModel = pydantic.BaseModel
-    TypeBaseModel = type[BaseModel]
-elif IS_PYDANTIC_V2:
-    from pydantic.v1.fields import FieldInfo as FieldInfoV1  # type: ignore[assignment]
-
-    # Union type needs to be last assignment to PydanticBaseModel to make mypy happy.
-    PydanticBaseModel = Union[BaseModel, pydantic.BaseModel]  # type: ignore[assignment,misc]
-    TypeBaseModel = Union[type[BaseModel], type[pydantic.BaseModel]]  # type: ignore[misc]
-else:
-    msg = f"Unsupported Pydantic version: {PYDANTIC_VERSION.major}"
-    raise ValueError(msg)
-
+PydanticBaseModel = BaseModel
+TypeBaseModel = type[BaseModel]
 
 TBaseModel = TypeVar("TBaseModel", bound=PydanticBaseModel)
 
 
 def is_pydantic_v1_subclass(cls: type) -> bool:
     """Check if the installed Pydantic version is 1.x-like."""
-    if IS_PYDANTIC_V1:
-        return True
-    if IS_PYDANTIC_V2:
-        from pydantic.v1 import BaseModel as BaseModelV1
-
-        if issubclass(cls, BaseModelV1):
-            return True
-    return False
+    return issubclass(cls, BaseModelV1)
 
 
 def is_pydantic_v2_subclass(cls: type) -> bool:
     """Check if the installed Pydantic version is 1.x-like."""
-    from pydantic import BaseModel
-
-    return IS_PYDANTIC_V2 and issubclass(cls, BaseModel)
+    return issubclass(cls, BaseModel)
 
 
 def is_basemodel_subclass(cls: type) -> bool:
@@ -113,7 +88,6 @@
 
     Check if the given class is a subclass of any of the following:
 
-    * pydantic.BaseModel in Pydantic 1.x
     * pydantic.BaseModel in Pydantic 2.x
     * pydantic.v1.BaseModel in Pydantic 2.x
     """
@@ -121,24 +95,7 @@ def is_basemodel_subclass(cls: type) -> bool:
     if not inspect.isclass(cls) or isinstance(cls, GenericAlias):
         return False
 
-    if IS_PYDANTIC_V1:
-        from pydantic import BaseModel as BaseModelV1Proper
-
-        if issubclass(cls, BaseModelV1Proper):
-            return True
-    elif IS_PYDANTIC_V2:
-        from pydantic import BaseModel as BaseModelV2
-        from pydantic.v1 import BaseModel as BaseModelV1
-
-        if issubclass(cls, BaseModelV2):
-            return True
-
-        if issubclass(cls, BaseModelV1):
-            return True
-    else:
-        msg = f"Unsupported Pydantic version: {PYDANTIC_VERSION.major}"
-        raise ValueError(msg)
-    return False
+    return issubclass(cls, (BaseModel, BaseModelV1))
 
 
 def is_basemodel_instance(obj: Any) -> bool:
@@ -146,28 +103,10 @@
 
     Check if the given class is an instance of any of the following:
 
-    * pydantic.BaseModel in Pydantic 1.x
     * pydantic.BaseModel in Pydantic 2.x
     * pydantic.v1.BaseModel in Pydantic 2.x
     """
-    if IS_PYDANTIC_V1:
-        from pydantic import BaseModel as BaseModelV1Proper
-
-        if isinstance(obj, BaseModelV1Proper):
-            return True
-    elif IS_PYDANTIC_V2:
-        from pydantic import BaseModel as BaseModelV2
-        from pydantic.v1 import BaseModel as BaseModelV1
-
-        if isinstance(obj, BaseModelV2):
-            return True
-
-        if isinstance(obj, BaseModelV1):
-            return True
-    else:
-        msg = f"Unsupported Pydantic version: {PYDANTIC_VERSION.major}"
-        raise ValueError(msg)
-    return False
+    return isinstance(obj, (BaseModel, BaseModelV1))
 
 
 # How to type hint this?
@@ -183,6 +122,9 @@ def pre_init(func: Callable) -> Any:
 
     with warnings.catch_warnings():
         warnings.filterwarnings(action="ignore", category=PydanticDeprecationWarning)
+        # Ideally we would use @model_validator(mode="before") but this would change the
+        # order of the validators. See https://github.com/pydantic/pydantic/discussions/7434.
+        # So we keep root_validator for backward compatibility.
         @root_validator(pre=True)
         @wraps(func)
         def wrapper(cls: type[BaseModel], values: dict[str, Any]) -> dict[str, Any]:
@@ -243,26 +185,18 @@ def handle_invalid_for_json_schema(
 
 def _create_subset_model_v1(
     name: str,
-    model: type[BaseModel],
+    model: type[BaseModelV1],
     field_names: list,
     *,
     descriptions: Optional[dict] = None,
     fn_description: Optional[str] = None,
 ) -> type[BaseModel]:
     """Create a pydantic model with only a subset of model's fields."""
-    if IS_PYDANTIC_V1:
-        from pydantic import create_model
-    elif IS_PYDANTIC_V2:
-        from pydantic.v1 import create_model  # type: ignore[no-redef]
-    else:
-        msg = f"Unsupported pydantic version: {PYDANTIC_VERSION.major}"
-        raise NotImplementedError(msg)
-
     fields = {}
 
     for field_name in field_names:
         # Using pydantic v1 so can access __fields__ as a dict.
-        field = model.__fields__[field_name]  # type: ignore[index]
+        field = model.__fields__[field_name]
         t = (
             # this isn't perfect but should work for most functions
             field.outer_type_
@@ -273,34 +207,31 @@
             field.field_info.description = descriptions[field_name]
         fields[field_name] = (t, field.field_info)
 
-    rtn = create_model(name, **fields)  # type: ignore[call-overload]
+    rtn = create_model_v1(name, **fields)  # type: ignore[call-overload]
     rtn.__doc__ = textwrap.dedent(fn_description or model.__doc__ or "")
     return rtn
 
 
 def _create_subset_model_v2(
     name: str,
-    model: type[pydantic.BaseModel],
+    model: type[BaseModel],
     field_names: list[str],
     *,
     descriptions: Optional[dict] = None,
     fn_description: Optional[str] = None,
-) -> type[pydantic.BaseModel]:
+) -> type[BaseModel]:
     """Create a pydantic model with a subset of the model fields."""
-    from pydantic import create_model
-    from pydantic.fields import FieldInfo
-
     descriptions_ = descriptions or {}
     fields = {}
     for field_name in field_names:
         field = model.model_fields[field_name]
         description = descriptions_.get(field_name, field.description)
-        field_info = FieldInfo(description=description, default=field.default)
+        field_info = FieldInfoV2(description=description, default=field.default)
         if field.metadata:
             field_info.metadata = field.metadata
         fields[field_name] = (field.annotation, field_info)
-    rtn = create_model(  # type: ignore[call-overload]
+    rtn = _create_model_base(  # type: ignore[call-overload]
         name, **fields, __config__=ConfigDict(arbitrary_types_allowed=True)
     )
@@ -321,7 +252,7 @@
 
 # Private functionality to create a subset model that's compatible across
 # different versions of pydantic.
-# Handles pydantic versions 1.x and 2.x. including v1 of pydantic in 2.x.
+# Handles pydantic versions 2.x. including v1 of pydantic in 2.x.
 # However, can't find a way to type hint this.
 def _create_subset_model(
     name: str,
@@ -332,7 +263,7 @@ def _create_subset_model(
     fn_description: Optional[str] = None,
 ) -> type[BaseModel]:
     """Create subset model using the same pydantic version as the input model."""
-    if IS_PYDANTIC_V1:
+    if issubclass(model, BaseModelV1):
         return _create_subset_model_v1(
             name,
             model,
@@ -340,68 +271,43 @@ def _create_subset_model(
             descriptions=descriptions,
             fn_description=fn_description,
         )
-    if IS_PYDANTIC_V2:
-        from pydantic.v1 import BaseModel as BaseModelV1
-
-        if issubclass(model, BaseModelV1):
-            return _create_subset_model_v1(
-                name,
-                model,
-                field_names,
-                descriptions=descriptions,
-                fn_description=fn_description,
-            )
-        return _create_subset_model_v2(
-            name,
-            model,
-            field_names,
-            descriptions=descriptions,
-            fn_description=fn_description,
-        )
-    msg = f"Unsupported pydantic version: {PYDANTIC_VERSION.major}"
-    raise NotImplementedError(msg)
+    return _create_subset_model_v2(
+        name,
+        model,
+        field_names,
+        descriptions=descriptions,
+        fn_description=fn_description,
+    )
+
+
+@overload
+def get_fields(model: type[BaseModel]) -> dict[str, FieldInfoV2]: ...
 
-if IS_PYDANTIC_V2:
-    from pydantic import BaseModel as BaseModelV2
-    from pydantic.v1 import BaseModel as BaseModelV1
-
-    @overload
-    def get_fields(model: type[BaseModelV2]) -> dict[str, FieldInfoV2]: ...
+@overload
+def get_fields(model: BaseModel) -> dict[str, FieldInfoV2]: ...
 
-    @overload
-    def get_fields(model: BaseModelV2) -> dict[str, FieldInfoV2]: ...
 
-    @overload
-    def get_fields(model: type[BaseModelV1]) -> dict[str, FieldInfoV1]: ...
+@overload
+def get_fields(model: type[BaseModelV1]) -> dict[str, FieldInfoV1]: ...
 
-    @overload
-    def get_fields(model: BaseModelV1) -> dict[str, FieldInfoV1]: ...
 
-    def get_fields(
-        model: Union[type[Union[BaseModelV2, BaseModelV1]], BaseModelV2, BaseModelV1],
-    ) -> Union[dict[str, FieldInfoV2], dict[str, FieldInfoV1]]:
-        """Get the field names of a Pydantic model."""
-        if hasattr(model, "model_fields"):
-            return model.model_fields
+@overload
+def get_fields(model: BaseModelV1) -> dict[str, FieldInfoV1]: ...
 
-        if hasattr(model, "__fields__"):
-            return model.__fields__  # type: ignore[return-value]
-        msg = f"Expected a Pydantic model. Got {type(model)}"
-        raise TypeError(msg)
-elif IS_PYDANTIC_V1:
-    from pydantic import BaseModel as BaseModelV1_
+
+def get_fields(
+    model: Union[type[Union[BaseModel, BaseModelV1]], BaseModel, BaseModelV1],
+) -> Union[dict[str, FieldInfoV2], dict[str, FieldInfoV1]]:
+    """Get the field names of a Pydantic model."""
+    if hasattr(model, "model_fields"):
+        return model.model_fields
 
-    def get_fields(  # type: ignore[no-redef]
-        model: Union[type[BaseModelV1_], BaseModelV1_],
-    ) -> dict[str, FieldInfoV1]:
-        """Get the field names of a Pydantic model."""
+    if hasattr(model, "__fields__"):
         return model.__fields__  # type: ignore[return-value]
+    msg = f"Expected a Pydantic model. Got {type(model)}"
+    raise TypeError(msg)
-else:
-    msg = f"Unsupported Pydantic version: {PYDANTIC_VERSION.major}"
-    raise ValueError(msg)
 
 
 _SchemaConfig = ConfigDict(
     arbitrary_types_allowed=True, frozen=True, protected_namespaces=()
 )
@@ -545,14 +451,11 @@ def create_model(
 
 def _remap_field_definitions(field_definitions: dict[str, Any]) -> dict[str, Any]:
     """This remaps fields to avoid colliding with internal pydantic fields."""
-    from pydantic import Field
-    from pydantic.fields import FieldInfo
-
     remapped = {}
     for key, value in field_definitions.items():
         if key.startswith("_") or key in _RESERVED_NAMES:
             # Let's add a prefix to avoid colliding with internal pydantic fields
-            if isinstance(value, FieldInfo):
+            if isinstance(value, FieldInfoV2):
                 msg = (
                     f"Remapping for fields starting with '_' or fields with a name "
                     f"matching a reserved name {_RESERVED_NAMES} is not supported if "
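
In langchain_core.utils.pydantic above, get_pydantic_major_version now relies on typing_extensions.deprecated instead of a hand-written warnings.warn call; the decorator marks the function for static checkers and, by default, still emits a DeprecationWarning when the function is called at runtime. A short sketch of that behavior with a hypothetical helper, not a function from this diff:

    import warnings

    from typing_extensions import deprecated


    @deprecated("Use NEW_CONSTANT instead.")
    def old_helper() -> int:
        return 1


    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        old_helper()

    print(caught[0].category)  # <class 'DeprecationWarning'>
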
diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml
index ef4ee2f627f84..eda927f95747a 100644
--- a/libs/core/pyproject.toml
+++ b/libs/core/pyproject.toml
@@ -70,7 +70,6 @@ langchain-text-splitters = { path = "../text-splitters" }
 strict = "True"
 strict_bytes = "True"
 enable_error_code = "deprecated"
-report_deprecated_as_note = "True"
 
 # TODO: activate for 'strict' checking
 disallow_any_generics = "False"
diff --git a/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py b/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py
index 992e5c48c5568..025a075251d66 100644
--- a/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py
+++ b/libs/core/tests/unit_tests/output_parsers/test_openai_tools.py
@@ -17,7 +17,6 @@
 )
 from langchain_core.outputs import ChatGeneration
 from langchain_core.utils.pydantic import (
-    IS_PYDANTIC_V1,
     IS_PYDANTIC_V2,
 )
 
@@ -602,41 +601,6 @@ class Forecast(pydantic.BaseModel):
     ]
 
 
-@pytest.mark.skipif(not IS_PYDANTIC_V1, reason="This test is for pydantic 1")
-def test_parse_with_different_pydantic_1_proper() -> None:
-    """Test with pydantic.BaseModel from pydantic 1."""
-    import pydantic
-
-    class Forecast(pydantic.BaseModel):
-        temperature: int
-        forecast: str
-
-    # Can't get pydantic to work here due to the odd typing of tryig to support
-    # both v1 and v2 in the same codebase.
-    parser = PydanticToolsParser(tools=[Forecast])
-    message = AIMessage(
-        content="",
-        tool_calls=[
-            {
-                "id": "call_OwL7f5PE",
-                "name": "Forecast",
-                "args": {"temperature": 20, "forecast": "Sunny"},
-            }
-        ],
-    )
-
-    generation = ChatGeneration(
-        message=message,
-    )
-
-    assert parser.parse_result([generation]) == [
-        Forecast(
-            temperature=20,
-            forecast="Sunny",
-        )
-    ]
-
-
 def test_max_tokens_error(caplog: Any) -> None:
     parser = PydanticToolsParser(tools=[NameCollector], first_tool_only=True)
     input = AIMessage(
diff --git a/libs/core/tests/unit_tests/test_tools.py b/libs/core/tests/unit_tests/test_tools.py
index 3ffc15ad0fd1f..5887c021a31f1 100644
--- a/libs/core/tests/unit_tests/test_tools.py
+++ b/libs/core/tests/unit_tests/test_tools.py
@@ -65,7 +65,6 @@
     convert_to_openai_tool,
 )
 from langchain_core.utils.pydantic import (
-    IS_PYDANTIC_V1,
     IS_PYDANTIC_V2,
     _create_subset_model,
     create_model_v2,
@@ -79,9 +78,11 @@ def _get_tool_call_json_schema(tool: BaseTool) -> dict:
     if isinstance(tool_schema, dict):
         return tool_schema
 
-    if hasattr(tool_schema, "model_json_schema"):
+    if issubclass(tool_schema, BaseModel):
         return tool_schema.model_json_schema()
-    return tool_schema.schema()
+    if issubclass(tool_schema, BaseModelV1):
+        return tool_schema.schema()
+    return {}
 
 
 def test_unnamed_decorator() -> None:
@@ -1853,11 +1854,14 @@ def _run(self, *args: Any, **kwargs: Any) -> str:
     )
 
     input_schema = tool.get_input_schema()
-    input_json_schema = (
-        input_schema.model_json_schema()
-        if hasattr(input_schema, "model_json_schema")
-        else input_schema.schema()
-    )
+    if issubclass(input_schema, BaseModel):
+        input_json_schema = input_schema.model_json_schema()
+    elif issubclass(input_schema, BaseModelV1):
+        input_json_schema = input_schema.schema()
+    else:
+        msg = "Unknown input schema type"
+        raise TypeError(msg)
+
     assert input_json_schema == {
         "properties": {
             "a": {"title": "A", "type": "integer"},
@@ -1943,12 +1947,14 @@ def foo(a: int, b: str) -> str:
 
     assert foo_tool.invoke({"a": 5, "b": "hello"}) == "foo"
 
-    args_schema = cast("BaseModel", foo_tool.args_schema)
-    args_json_schema = (
-        args_schema.model_json_schema()
-        if hasattr(args_schema, "model_json_schema")
-        else args_schema.schema()
-    )
+    args_schema = cast("type[BaseModel]", foo_tool.args_schema)
+    if issubclass(args_schema, BaseModel):
+        args_json_schema = args_schema.model_json_schema()
+    elif issubclass(args_schema, BaseModelV1):
+        args_json_schema = args_schema.schema()
+    else:
+        msg = "Unknown input schema type"
+        raise TypeError(msg)
     assert args_json_schema == {
         "properties": {
             "a": {"title": "A", "type": "integer"},
@@ -1960,11 +1966,13 @@ def foo(a: int, b: str) -> str:
     }
 
     input_schema = foo_tool.get_input_schema()
-    input_json_schema = (
-        input_schema.model_json_schema()
-        if hasattr(input_schema, "model_json_schema")
-        else input_schema.schema()
-    )
+    if issubclass(input_schema, BaseModel):
+        input_json_schema = input_schema.model_json_schema()
+    elif issubclass(input_schema, BaseModelV1):
+        input_json_schema = input_schema.schema()
+    else:
+        msg = "Unknown input schema type"
+        raise TypeError(msg)
     assert input_json_schema == {
         "properties": {
             "a": {"title": "A", "type": "integer"},
@@ -2089,63 +2097,6 @@ class ModelD(ModelC, Generic[D]):
     assert actual == expected
 
 
-@pytest.mark.skipif(not IS_PYDANTIC_V1, reason="Testing pydantic v1.")
-def test__get_all_basemodel_annotations_v1() -> None:
-    A = TypeVar("A")
-
-    class ModelA(BaseModel, Generic[A], extra="allow"):
-        a: A
-
-    class ModelB(ModelA[str]):
-        b: Annotated[ModelA[dict[str, Any]], "foo"]
-
-    class Mixin:
-        def foo(self) -> str:
-            return "foo"
-
-    class ModelC(Mixin, ModelB):
-        c: dict
-
-    expected = {"a": str, "b": Annotated[ModelA[dict[str, Any]], "foo"], "c": dict}
-    actual = get_all_basemodel_annotations(ModelC)
-    assert actual == expected
-
-    expected = {"a": str, "b": Annotated[ModelA[dict[str, Any]], "foo"]}
-    actual = get_all_basemodel_annotations(ModelB)
-    assert actual == expected
-
-    expected = {"a": Any}
-    actual = get_all_basemodel_annotations(ModelA)
-    assert actual == expected
-
-    expected = {"a": int}
-    actual = get_all_basemodel_annotations(ModelA[int])
-    assert actual == expected
-
-    D = TypeVar("D", bound=Union[str, int])
-
-    class ModelD(ModelC, Generic[D]):
-        d: Optional[D]
-
-    expected = {
-        "a": str,
-        "b": Annotated[ModelA[dict[str, Any]], "foo"],
-        "c": dict,
-        "d": Union[str, int, None],
-    }
-    actual = get_all_basemodel_annotations(ModelD)
-    assert actual == expected
-
-    expected = {
-        "a": str,
-        "b": Annotated[ModelA[dict[str, Any]], "foo"],
-        "c": dict,
-        "d": Union[int, None],
-    }
-    actual = get_all_basemodel_annotations(ModelD[int])
-    assert actual == expected
-
-
 def test_tool_annotations_preserved() -> None:
     """Test that annotations are preserved when creating a tool."""
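
The test helpers above apply the same class-based dispatch to JSON schema generation: model_json_schema() for v2 models, schema() for v1-style models. A small sketch under the same Pydantic 2 assumption; json_schema_of and Args are illustrative names:

    import pydantic
    import pydantic.v1


    def json_schema_of(schema: type) -> dict:
        if issubclass(schema, pydantic.BaseModel):
            return schema.model_json_schema()
        if issubclass(schema, pydantic.v1.BaseModel):
            return schema.schema()
        raise TypeError(f"Unknown schema type: {schema}")


    class Args(pydantic.BaseModel):
        a: int
        b: str


    print(json_schema_of(Args)["required"])  # ['a', 'b']
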
diff --git a/libs/core/tests/unit_tests/utils/test_pydantic.py b/libs/core/tests/unit_tests/utils/test_pydantic.py
index 88ca45ff1d8f0..c99e6d4f6d0ff 100644
--- a/libs/core/tests/unit_tests/utils/test_pydantic.py
+++ b/libs/core/tests/unit_tests/utils/test_pydantic.py
@@ -7,7 +7,6 @@
 from pydantic import ConfigDict
 
 from langchain_core.utils.pydantic import (
-    IS_PYDANTIC_V1,
     IS_PYDANTIC_V2,
     PYDANTIC_VERSION,
     _create_subset_model_v2,
@@ -96,11 +95,7 @@ def validator(cls, v: dict[str, Any]) -> dict[str, Any]:
 
 def test_is_basemodel_subclass() -> None:
     """Test pydantic."""
-    if IS_PYDANTIC_V1:
-        from pydantic import BaseModel as BaseModelV1Proper
-
-        assert is_basemodel_subclass(BaseModelV1Proper)
-    elif IS_PYDANTIC_V2:
+    if IS_PYDANTIC_V2:
         from pydantic import BaseModel as BaseModelV2
         from pydantic.v1 import BaseModel as BaseModelV1
 
@@ -114,14 +109,7 @@ def test_is_basemodel_subclass() -> None:
 
 def test_is_basemodel_instance() -> None:
     """Test pydantic."""
-    if IS_PYDANTIC_V1:
-        from pydantic import BaseModel as BaseModelV1Proper
-
-        class FooV1(BaseModelV1Proper):
-            x: int
-
-        assert is_basemodel_instance(FooV1(x=5))
-    elif IS_PYDANTIC_V2:
+    if IS_PYDANTIC_V2:
         from pydantic import BaseModel as BaseModelV2
         from pydantic.v1 import BaseModel as BaseModelV1
 
@@ -168,17 +156,6 @@ class Foo(BaseModelV2):
     }
 
 
-@pytest.mark.skipif(not IS_PYDANTIC_V1, reason="Only tests Pydantic v1")
-def test_fields_pydantic_v1() -> None:
-    from pydantic import BaseModel
-
-    class Foo(BaseModel):
-        x: int
-
-    fields = get_fields(Foo)
-    assert fields == {"x": Foo.model_fields["x"]}
-
-
 @pytest.mark.skipif(not IS_PYDANTIC_V2, reason="Only tests Pydantic v2")
 def test_fields_pydantic_v2_proper() -> None:
     from pydantic import BaseModel
diff --git a/libs/core/tests/unit_tests/utils/test_utils.py b/libs/core/tests/unit_tests/utils/test_utils.py
index 258b447409b97..de79723512099 100644
--- a/libs/core/tests/unit_tests/utils/test_utils.py
+++ b/libs/core/tests/unit_tests/utils/test_utils.py
@@ -17,7 +17,6 @@
 )
 from langchain_core.utils._merge import merge_dicts
 from langchain_core.utils.pydantic import (
-    IS_PYDANTIC_V1,
     IS_PYDANTIC_V2,
 )
 from langchain_core.utils.utils import secret_from_env
@@ -243,20 +242,6 @@ class PydanticModel(BaseModel):
     assert result == expected
 
 
-@pytest.mark.skipif(not IS_PYDANTIC_V1, reason="Requires pydantic 1")
-def test_get_pydantic_field_names_v1() -> None:
-    from pydantic import BaseModel, Field
-
-    class PydanticModel(BaseModel):
-        field1: str
-        field2: int
-        alias_field: int = Field(alias="aliased_field")
-
-    result = get_pydantic_field_names(PydanticModel)
-    expected = {"field1", "field2", "aliased_field", "alias_field"}
-    assert result == expected
-
-
 def test_from_env_with_env_variable() -> None:
     key = "TEST_KEY"
     value = "test_value"
diff --git a/libs/core/uv.lock b/libs/core/uv.lock
index 06c9ba2e20a9c..b4a45f315f1ec 100644
--- a/libs/core/uv.lock
+++ b/libs/core/uv.lock
@@ -1,4 +1,5 @@
 version = 1
+revision = 1
 requires-python = ">=3.9, <4.0"
 resolution-markers = [
     "python_full_version >= '3.13'",
@@ -1104,7 +1105,7 @@ test-integration = [
 ]
 typing = [
     { name = "lxml-stubs", specifier = ">=0.5.1,<1.0.0" },
-    { name = "mypy", specifier = ">=1.10,<2.0" },
+    { name = "mypy", specifier = ">=1.15,<2.0" },
     { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
    { name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" },
 ]