Package maintenance

pydantic-slim

pydantic-slim This is a placeholder in case we want to use this package name in the future.

pypi package. Binary | Source

Latest version: 0.0.0 Released: 2024-03-29

pydantic-dict

pydantic_dict

A pydantic model subclass that implements Python's dictionary interface.

Example:

```python
from pydantic_dict import BaseModelDict


class User(BaseModelDict):
    id: int
    name: str = "Jane Doe"


user = User(id=42)

user["session_id"] = "95607c42-250a-4913-9dfb-00eb6535e685"
assert user.session_id == "95607c42-250a-4913-9dfb-00eb6535e685"
assert user["session_id"] == "95607c42-250a-4913-9dfb-00eb6535e685"

user.pop("session_id")
assert "session_id" not in user

assert user.get("last_name", None) is None

user.update({"email": "jane.doe@email.com"})
print(user.json())
# >>> {"id": 42, "name": "Jane Doe", "email": "jane.doe@email.com"}

user.clear()  # fields are NOT removed; only non-fields are removed
print(user.json())
# >>> {"id": 42, "name": "Jane Doe"}

user.setdefault("last_active", "2023-01-01T19:56:10Z")
del user["last_active"]
```

Unset marker type

The Unset marker type provides a way to "mark" that an optional model field is not set by default and is not required to construct the model. This enables more semantic usage of built-in dict methods like get() and setdefault(), which can return or set a default value. Likewise, fields that are Unset are not considered members of a BaseModelDict dictionary (e.g. "unset_field" not in model_dict) and are not included in __iter__(), keys(), values(), or len(model_dict). This feature is especially useful when refactoring existing code to use pydantic.

Example:

```python
from typing import Optional

from pydantic_dict import BaseModelDict, Unset


class User(BaseModelDict):
    id: int
    name: str = "Jane Doe"
    email: Optional[str] = Unset


user = User(id=42)

assert "email" not in user
user["email"]  # raises KeyError
assert len(user) == 2
assert set(user.keys()) == {"id", "name"}

user.setdefault("email", f"{user.id}@service.com")  # returns "42@service.com"
assert "email" in user
```

Install

```shell
pip install pydantic_dict
```

pypi package. Binary

Latest version: 0.0.3 Released: 2023-06-30

pydantic-marc

pydantic-marc

pydantic-marc is a library for validating data against the MARC21 Format for Bibliographic Data.

Installation

Use pip:

```shell
$ pip install pydantic-marc
```

Features

pydantic-marc uses pydantic, the popular data validation library, to define the valid components of a MARC record. The package expects that users will employ pymarc to read MARC records from binary files.

Basic usage

Validating a MARC record:

```python
from pymarc import MARCReader
from rich import print

from pydantic_marc import MarcRecord

with open("temp/valid.mrc", "rb") as fh:
    reader = MARCReader(fh)
    for record in reader:
        print(record)
        model = MarcRecord.model_validate(record, from_attributes=True)
        print(model.model_dump())
```

```json
{
    "leader": "00536nam a22001985i 4500",
    "fields": [
        {"001": "123456789"},
        {"008": "201201s2020 nyua 000 1 eng d"},
        {"035": {"ind1": " ", "ind2": " ", "subfields": [{"a": "(OCoLC)1234567890"}]}},
        {"049": {"ind1": " ", "ind2": " ", "subfields": [{"a": "NYPP"}]}},
        {
            "245": {
                "ind1": "0",
                "ind2": "0",
                "subfields": [
                    {"a": "Fake :"},
                    {"b": "Marc Record"}
                ]
            }
        },
        {
            "264": {
                "ind1": " ",
                "ind2": "1",
                "subfields": [
                    {"a": "New York :"},
                    {"b": "NY,"},
                    {"c": "[2020]"}
                ]
            }
        },
        {
            "300": {
                "ind1": " ",
                "ind2": " ",
                "subfields": [
                    {"a": "100 pages :"},
                    {"b": "color illustrations;"},
                    {"c": "30 cm"}
                ]
            }
        },
        {"336": {"ind1": " ", "ind2": " ", "subfields": [{"a": "text"}, {"b": "txt"}, {"2": "rdacontent"}]}},
        {"337": {"ind1": " ", "ind2": " ", "subfields": [{"a": "unmediated"}, {"b": "n"}, {"2": "rdamedia"}]}},
        {"338": {"ind1": " ", "ind2": " ", "subfields": [{"a": "volume"}, {"b": "nc"}, {"2": "rdacarrier"}]}}
    ]
}
```

If the record is invalid, the errors can be returned as JSON, a dictionary, or in a human-readable format.

JSON Error Message:

```python
from pydantic import ValidationError
from pymarc import MARCReader

from pydantic_marc import MarcRecord

with open("temp/invalid.mrc", "rb") as fh:
    reader = MARCReader(fh)
    for record in reader:
        print(record)
        try:
            MarcRecord.model_validate(record)
        except ValidationError as e:
            # errors as a dictionary
            print(e.errors())
            # errors as json
            print(e.json())
```

```python
[
    {
        "type": "non_repeatable_field",
        "loc": ("fields", "001"),
        "msg": "001: Has been marked as a non-repeating field.",
        "input": "001",
        "ctx": {"input": "001"}
    },
    {
        "type": "missing_required_field",
        "loc": ("fields", "245"),
        "msg": "One 245 field must be present in a MARC21 record.",
        "input": "245",
        "ctx": {"input": "245"}
    },
    {
        "type": "multiple_1xx_fields",
        "loc": ("fields", "100", "110"),
        "msg": "1XX: Only one 1XX tag is allowed. Record contains: ['100', '110']",
        "input": ["100", "110"],
        "ctx": {"input": ["100", "110"]}
    },
    {
        "type": "control_field_length_invalid",
        "loc": ("fields", "006"),
        "msg": "006: Length appears to be invalid. Reported length is: 6. Expected length is: 18",
        "input": "p |",
        "ctx": {"tag": "006", "valid": 18, "input": "p |", "length": 6}
    },
    {
        "type": "invalid_indicator",
        "loc": ("fields", "035", "ind1"),
        "msg": "035 ind1: Invalid data (0). Indicator should be ['', ' '].",
        "input": "0",
        "ctx": {"loc": ("035", "ind1"), "input": "0", "valid": ["", " "], "tag": "035", "ind": "ind1"}
    },
    {
        "type": "non_repeatable_subfield",
        "loc": ("fields", "600", "a"),
        "msg": "600 $a: Subfield cannot repeat.",
        "input": [PydanticSubfield(code="a", value="Foo"), PydanticSubfield(code="a", value="Foo,")],
        "ctx": {
            "loc": ("600", "a"),
            "input": [PydanticSubfield(code="a", value="Foo"), PydanticSubfield(code="a", value="Foo,")],
            "tag": "600",
            "code": "a"
        }
    }
]
```

Human-readable Error Message:

```python
from pydantic import ValidationError
from pymarc import MARCReader

from pydantic_marc import MarcRecord

with open("temp/invalid.mrc", "rb") as fh:
    reader = MARCReader(fh)
    for record in reader:
        print(record)
        try:
            MarcRecord.model_validate(record)
        except ValidationError as e:
            # errors in a human-readable format
            print(e)
```

```text
6 validation errors for MarcRecord
fields.001
  001: Has been marked as a non-repeating field. [type=non_repeatable_field, input_value='001', input_type=str]
fields.245
  One 245 field must be present in a MARC21 record. [type=missing_required_field, input_value='245', input_type=str]
fields.100.110
  1XX: Only one 1XX tag is allowed. Record contains: ['100', '110'] [type=multiple_1xx_fields, input_value=['100', '110'], input_type=list]
fields.006
  006: Length appears to be invalid. Reported length is: 6. Expected length is: 18 [type=control_field_length_invalid, input_value='p |', input_type=str]
fields.035.ind1
  035 ind1: Invalid data (0). Indicator should be ['', ' ']. [type=invalid_indicator, input_value='0', input_type=str]
fields.600.a
  600 $a: Subfield cannot repeat. [type=non_repeatable_subfield, input_value=[PydanticSubfield(code='a...code='a', value='Foo,')], input_type=list]
```

pypi package. Binary

Latest version: 0.1.0 Released: 2025-03-09

pydantic-xlsx

pydantic-xlsx

This Python library tries to ease parsing and dumping data from and to Excel's xlsx format (also known as Office Open XML Workbook) with the help of pydantic models. It uses openpyxl to interact with the Excel files. As with pydantic, you define the structure of your in- or output xlsx file as a pydantic model, with the help of Python's typing system. You can find the API documentation here.

State of the project

Alpha state. This package started as a module within another application; I'm currently extracting pydantic-xlsx from that project, so expect some rough edges and missing documentation.

Motivation and Overview

First of all: if there is another way to accomplish your goal without using spreadsheet software or data formats, do it. Spreadsheets have many drawbacks in contrast to »real« databases and programming. Consider using Jupyter if you need some sort of interaction with your data. This package was written for circumstances where you're forced to work with spreadsheet files. The goal of pydantic-xlsx is to make the interaction with such data sources/environments as comfortable as possible while enforcing as much data validation as possible. Another more or less legitimate use case for this library is the ability to get a quick overview of your data for debugging.

To get a first glimpse, consider the following example:

```python
from enum import Enum
from typing import List

from pydantic_xlsx import XlsxField, XlsxModel
from pydantic_xlsx.money import Euro


class Function(str, Enum):
    boss = "Boss"
    worker = "Worker"


class Employee(XlsxModel):
    name: str = XlsxField("", alias="Full Name")
    age: int
    wage: Euro
    function: Function

    class Config:
        use_enum_values = True
        allow_population_by_field_name = True


class Company(XlsxModel):
    staff: List[Employee]

    class Config:
        freeze_cell = "A2"


my_company = Company(staff=[
    Employee(name="Morio Rossi", age=42, wage=4200, function=Function.boss),
    Employee(name="John Doe", age=23, wage=2300, function=Function.worker)
])

my_company.to_file("my-company.xlsx")
```

Results in the following file:

You can parse the file using the from_file method.

```python
loaded_company = Company.from_file("my-company.xlsx")
print(loaded_company)
```

A word on the Config sub-class: inside the Employee model, the Config sub-class sets two fairly common pydantic options when working with Excel files. use_enum_values represents enums by their values rather than as a classpath; without this option »Boss« would be represented as function.boss and »Worker« as function.worker. Using the enum value makes the spreadsheet more user-friendly. By setting allow_population_by_field_name to True you can define alternative column names by setting the alias property of a field.

Features

- In-/exporting structured data from/to Excel while benefiting from Pydantic's mature features like data validation.
- The correct Excel number format is applied according to the field's data type. It's also possible to customize the formatting for a specific field.
- Define document-wide fonts as well as alter the font for columns.
- Enum columns provide the spreadsheet user with a drop-down menu showing all allowed values.
- The data is referenced as a Worksheet Table in the xlsx document, providing more information on the structure of the data and fast sorting per column.
- Internal money field type which generates the correct Excel number format. Some currencies (like Euro or US Dollar) are already defined; others can be easily created. The format for printing can be controlled within pydantic_xlsx.

Mapping/Composition

Unlike data formats supported by pydantic (like JSON or YAML), spreadsheets do not have a straightforward way of arbitrarily nesting data structures. This quickly becomes a problem if your model describes lists of lists or the like. Unaddressed, this would lead to a wide range of undefined behavior when translating a pydantic model to a spreadsheet. To circumvent this, pydantic-xlsx only allows a defined range of nested data structures as valid input. This section gives an overview of these types and their mapping into spreadsheets (this process is internally known as »composition«).

Single Model

This handles all models which do not contain any models as property types. The resulting spreadsheet will contain one sheet with a single row of data.

```python
class Employee(XlsxModel):
    name: str = XlsxField(alias="Full Name")
    age: int


employee = Employee(name="Morio Rossi", age=42)
employee.to_file("employee.xlsx")
```

Will result in the following file:

As you see, the Excel sheet is named after your model class (Employee) and contains a single row of data. Single Model mapping can only represent a single entry of data and is thus not very helpful in most cases.

Root collection

Collection Types

Todo.

Field options

You can alter the appearance and behavior of columns by using XlsxField. The available options are defined in the XlsxFieldInfo class. A sketch follows after the Document options section.

Font (font): Alter the font of a specific field. The fonts are defined using the openpyxl Font object.

Number Format (number_format): Optional Excel number format code to alter the display of numbers, dates and so on. Please refer to the Excel documentation to learn more about the syntax.

Document options

The library tries to output spreadsheets with some reasonable styles and tweaks. By defining the inner Config class in your model, you can control this behavior and the appearance of the output document. For more information you can consult the documentation on the XlsxConfig class.

Header font (header_font): The library expects the first row of every sheet to contain the names of the fields. Use this option to alter the appearance of this row by defining your own openpyxl Font (learn more about styling with openpyxl here). Defaults to openpyxl.styles.Font(name="Arial", bold=True).

Font (font): Optional openpyxl Font for all of your spreadsheet except the title row. Defaults to None.

Freeze Cells (freeze_cell): Freezing cells keeps them visible while scrolling through your document (learn more in the Excel documentation). This is especially useful to pin the header row, which is what pydantic-xlsx does by default (freeze cell at A2).

Todo: The rest

Known pitfalls

Massive amount of empty cells when loading a spreadsheet with data validation (Generics). Cause: pydantic-xlsx uses a method of openpyxl to determine the dimension of the data area (aka the part of the spreadsheet actually containing data). A cell is treated as non-empty (thus expanding the size of the imported area) from the point where some properties were set for it; defining a valid data range is one of them. If a user accidentally defines a valid data range for the whole column, you end up with pydantic-xlsx trying to import and validate thousands of seemingly empty rows. Solution: this is exactly why you shouldn't use spreadsheets in the first place. The only fix is to manually delete all formatting etc. from all worksheets, or just copy the relevant data into a new spreadsheet (including the header).
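The field and document options above are described without an inline example, so here is a minimal sketch of how they might combine. It assumes XlsxField accepts font and number_format keyword options and Config accepts header_font and freeze_cell as listed above; the Invoice model and its fields are hypothetical.

```python
from openpyxl.styles import Font

from pydantic_xlsx import XlsxField, XlsxModel


class Invoice(XlsxModel):
    # Hypothetical fields, for illustration only.
    customer: str = XlsxField("", font=Font(name="Arial", italic=True))
    total: float = XlsxField(0.0, number_format="#,##0.00")  # Excel number format code

    class Config:
        header_font = Font(name="Arial", bold=True)  # the documented default
        freeze_cell = "A2"  # pin the header row, as described above
```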

pypi package. Binary

Latest version: 0.1.0 Released: 2025-01-25

BIDS-pydantic

BIDS-pydantic

Overview

BIDS-pydantic will pull a specified version (from v1.7.0 onwards, tested up to v1.7.0) of the BIDS metadata schema, which is used in the JSON BIDS sidecar, from the official BIDS GitHub page, and create corresponding pydantic models, which provide BIDS data validation using Python type annotations. Alternatively, the BIDS-pydantic-models package will only install the models for direct use in your Python software. More information on the use of the models can be found here.

Table of Contents

Quickstart
Installation
Usage
Development
Acknowledgements
License

How To Contribute

Got a great idea for something to implement in BIDS-pydantic, or maybe you have just found a bug? Create an issue to get in touch with the development team and we'll take it from there.

Quickstart

If you just want to use the models in your project, download the pydantic models file for the BIDS schema version you wish to use from the models directory and add it to your code-base. These files are generated using the bids-pydantic make -a command (see below). Alternatively, you can just run:

```sh
$ pip install bids-pydantic-models
```

More information on the use of the models can be found here. If you want to use the command line tool to generate the models, keep reading this README.

Installation

Install with:

```sh
$ pip install bids-pydantic
```

BIDS-pydantic can be installed as a module directly from the python package index. For more information on how to use a python package in this way, please see https://docs.python.org/3/installing/index.html

Python Version

We recommend using the latest version of Python. BIDS-pydantic supports Python 3.9 and newer.

Dependencies

These distributions will be installed automatically when installing BIDS-pydantic:

pydantic
datamodel-code-generator

Usage

The primary commands can be viewed with the command bids-pydantic:

```
usage: bids-pydantic [-h] {list,make} ...

Run one of a set of commands. For example: bids-pydantic list, or bids-pydantic
make. Run either command with -h e.g. bids-pydantic make -h to get help for
that command.

optional arguments:
  -h, --help   show this help message and exit

command:
  {list,make}  subcommand to run
```

The list command help can be viewed with the command bids-pydantic list -h:

```
usage: bids-pydantic list [-h]

Queries the GitHub API and lists the available supported BIDS schema versions.
Only tested up to v1.7.0.

optional arguments:
  -h, --help  show this help message and exit
```

The make command help can be viewed with the command bids-pydantic make -h:

```
usage: bids-pydantic make [-h] [--output OUTPUT] [--output-all OUTPUT_ALL]
                          [--schema-version [SCHEMA_VERSION]]

Make a new python file(s) containing BIDS compliant pydantic models

optional arguments:
  -h, --help            show this help message and exit
  --output OUTPUT, -o OUTPUT
                        The output python filename to create (will output to
                        stdout console if not specified).
  --output-all OUTPUT_ALL, -a OUTPUT_ALL
                        Find all parsable schemas and output each to the
                        provided directory. Will create filenames such as
                        bids_schema_model_v_1_7_0.py, etc. Will overwrite any
                        files in that directory with the same name.
  --schema-version [SCHEMA_VERSION]
                        The BIDS schema version to use. e.g. 1.7.0 - supported
                        versions can be discovered using the list command. If
                        a version is not specified v1.7.0 will be used.
  --input INPUT, -i INPUT
                        Specify an input BIDS metadata (yml) file to use
                        instead of downloading a version from GitHub. Cannot
                        be used with --schema-version or --output-all
```

Development

Development dependencies should be installed using pip install -r requirements/dev.txt -U, and pre-commit install then run to install code-quality Git hooks. Development should be carried out using Python 3.8.

Development must comply with a few code styling/quality rules and processes. Before pushing any code, make sure the CHANGELOG.md is updated as per the instructions in the CHANGELOG.md file. tox should also be run to ensure that tests and code-quality checks pass. The README.md file should be updated with any usage or development instructions. Ensure that a good level of test coverage is kept. The test reports will be committed to the CI system when testing is run, and these will be made available during code review. If you wish to view test coverage locally, run coverage report.

To ensure these code quality rules are kept to, pre-commit should be installed (see requirements/dev.txt), and pre-commit install run when first cloning this repo. This will install some pre-commit hooks that will ensure any committed code meets the minimum code quality and is formatted correctly before being committed to Git. This will ensure that tests pass on the CI system after code is pushed. The tools should also be included in any IDEs/editors used, where possible. To run manually, run pre-commit run --all-files. The following software tools will be run:

mypy
pylint
black
isort
pyupgrade

Acknowledgements

Conversion from schema to pydantic models is carried out using datamodel-code-generator. Data validation is performed using pydantic.

License

You can check out the full license here. This project is licensed under the terms of the MIT license.
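As a concrete example of the flags documented above, generating models for a specific schema version into a single file might look like this (an illustrative invocation, not taken from the README; bids_models.py is an arbitrary output name):

```sh
$ bids-pydantic make --schema-version 1.7.0 --output bids_models.py
```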

pypi package. Binary | Source

Latest version: 0.0.3 Released: 2023-06-08

pydantic-bind

pydantic-bind

Table of Contents

Overview
Getting Started
Why Not Protobufs?
No Copy
Supported Types
Inheritance
Msgpack
Namespaces
Generated Code
Other Languages

Overview

Python is the language of choice for finance, data science etc. Python calling C++ (and increasingly, Rust) is a common pattern, leveraging packages such as pybind11. A common problem is how best to represent data to be shared between python and C++ code. One would like idiomatic representations in each language, and this may be necessary to fully utilise certain python packages. E.g., FastAPI is a popular way to create REST services, using Open API definitions derived from pydantic classes. Therefore, a data model authored using pydantic classes, or native python dataclasses, from which sensible C++ structs and appropriate marshalling can automatically be generated, is desirable.

This package provides such tools: a cmake rule allows you to generate C++ structs (with msgpack serialisation) and corresponding pybind11 bindings. Python functions allow you to navigate between the C++ pybind11 objects and the native python objects. There is also an option for all python operations to be directed to an owned pybind11 object (see No Copy).

Note that the typical python developer experience is now somewhat changed, in that it's necessary to build/install the project. I personally use JetBrains CLion, in place of PyCharm, for such projects.

For an example of the kind of behaviour-less object model this package is intended to help with, please see (the rather nascent) fin-data-model.

Getting Started

pydantic_bind adds a custom cmake rule: pydantic_bind_add_package(). This rule will do the following:

- scan for sub-packages
- scan each sub-package for all .py files
- add custom steps for generating .cpp/.h files from any of the following, encountered in the .py files:
  - dataclasses
  - classes derived from pydantic's BaseModel
  - enums

C++ directory and namespace structure will match the python package structure (see Namespaces). You can create an instance of the pybind11 class from your original using get_pybind_value(), e.g.,

my_class.py:

```python
from dataclasses import dataclass


@dataclass
class MyClass:
    my_int: int
    my_string: str | None
```

CMakeLists.txt:

```cmake
cmake_minimum_required(VERSION 3.9)
project(my_project)

set(CMAKE_CXX_STANDARD 20)

find_package(Python3 REQUIRED COMPONENTS Interpreter Development)
find_package(pydantic_bind REQUIRED COMPONENTS HINTS "${Python3_SITELIB}")

pydantic_bind_add_package(my_package)
```

my_util.py:

```python
from pydantic_bind import get_pybind_value
from my_package.my_class import MyClass

orig = MyClass(my_int=123, my_string="hello")
generated = get_pybind_value(orig)

print(f"my_int: {orig.my_int}, {generated.my_int}")
```

Why Not Protobufs?

I personally find protobufs to be a PITA to use: they have poor to no variant support, the generated code is ugly and idiosyncratic, they're large and painful to copy around, etc. AVRO is more friendly but generates python classes dynamically, which confuses IDEs like PyCharm. I do think a good solution is something like pydantic_avro, where one can define the classes using pydantic, generate the AVRO schema and then the generated C++ etc. I might well try and converge this project with that approach. I was inspired to some degree by this blog.

No Copy

One annoyance of multi-language representations of data objects is that you often end up copying data around where you'd prefer to share a single copy. This is the raison d'être for Protobufs and its ilk.
In this project I've created implementations of BaseModel and dataclass which allow python to use the underlying C++ data representation, rather than holding its own copy. Deriving from this BaseModel will give you equivalent functionality to pydantic's BaseModel. The annotations are re-written using computed_field, with property getters and setters operating on the generated pybind class, which is instantiated behind the scenes in __init__. Note that this will make some operations (especially those that access __dict__) less efficient. I've also plumbed the computed fields into the JSON schema, so these objects can be used with FastAPI.

dataclass works similarly, adding properties to the dataclass, so that the existing get and set functionality works seamlessly in accessing the generated pybind11 class (also set via a shimmed __init__).

Using regular dataclass or BaseModel as members of classes defined with the pydantic_bind versions is very inefficient and not recommended.

Supported Types

The following python -> C++ mappings are supported (there are likely others I should consider):

bool --> bool
float --> double
int --> int
str --> std::string
datetime.date --> std::chrono::system_clock::time_point
datetime.datetime --> std::chrono::system_clock::time_point
datetime.time --> std::chrono::system_clock::time_point
datetime.timedelta --> std::chrono::duration
pydantic.BaseModel --> struct
pydantic_bind.BaseModel --> struct
dataclass --> struct
pydantic_bind.dataclass --> struct
Enum --> enum class

Inheritance

I have tested single inheritance (see Generated Code). Multiple inheritance may work ... or it may not. I'd generally advise against using it for data classes.

Msgpack

A rather rudimentary msgpack implementation is added to the generated C++ structs, using a slightly modified version of cpppack. It wasn't clear to me whether this package is maintained or accepting submissions, so I copied and slightly modified msgpack.h (also, I couldn't work out how to add it to my project with my rather rudimentary cmake skillz!) Changes include:

- Fixing includes
- Support for std::optional
- Support for std::variant
- Support for enums

A likely future enhancement will be to use cereal and add a msgpack adaptor. The no-copy python objects add to_msg_pack() and from_msg_pack() (the latter being a class method) to access this functionality.

Namespaces

Directory structure and namespaces in the generated C++ match the python package and module names. cmake requires unique target names and pybind11 requires that the filename (minus the OS-specific qualifiers) matches the module name.

Generated Code

Code is generated into a directory structure underneath /generated. Headers are installed to /include. Compiled pybind11 modules are installed into /__pybind__. For C++ usage, you need only the headers; the compiled code is for pybind/python usage only. For the example below, common_object_model/common_object_model/v1/common/__pybind__/foo.cpython-311-darwin.so will be installed (obviously with corresponding qualifiers for Linux/Windows). get_pybind_value() searches this directory. Imports/includes should work seamlessly (the python import scheme will be copied). I have tested this, but not completely rigorously.
common_object_model/common_object_model/v1/common/foo.py:

```python
from dataclasses import dataclass
import datetime as dt
from enum import Enum, auto
from typing import Union

from pydantic_bind import BaseModel


class Weekday(Enum):
    MONDAY = auto()
    TUESDAY = auto()
    WEDNESDAY = auto()
    THURSDAY = auto()
    FRIDAY = auto()
    SATURDAY = auto()
    SUNDAY = auto()


@dataclass
class DCFoo:
    my_int: int
    my_string: str | None


class Foo(BaseModel):
    my_bool: bool = True
    my_day: Weekday = Weekday.SUNDAY


class Bar(Foo):
    my_int: int = 123
    my_string: str
    my_optional_string: str | None = None


class Baz(BaseModel):
    my_variant: Union[str, float] = 123.
    my_date: dt.date
    my_foo: Foo
    my_dc_foo: DCFoo
```

will generate the following files:

common_object_model/generated/common_object_model/v1/common/foo.h:

```cpp
#ifndef COMMON_OBJECT_MODEL_FOO_H
#define COMMON_OBJECT_MODEL_FOO_H

#include <chrono>
#include <optional>
#include <string>
#include <variant>
#include "msgpack.h"

namespace common_object_model::v1::common
{
    enum Weekday
    {
        MONDAY = 1,
        TUESDAY = 2,
        WEDNESDAY = 3,
        THURSDAY = 4,
        FRIDAY = 5,
        SATURDAY = 6,
        SUNDAY = 7
    };

    struct DCFoo
    {
        DCFoo() : my_string(), my_int()
        {
        }

        DCFoo(std::optional<std::string> my_string, int my_int) :
            my_string(my_string), my_int(my_int)
        {
        }

        std::optional<std::string> my_string;
        int my_int;

        MSGPACK_DEFINE(my_string, my_int);
    };

    struct Foo
    {
        Foo(bool my_bool=true, Weekday my_day=SUNDAY) : my_bool(my_bool), my_day(my_day)
        {
        }

        bool my_bool;
        Weekday my_day;

        MSGPACK_DEFINE(my_bool, my_day);
    };

    struct Bar : public Foo
    {
        Bar() : Foo(), my_string(), my_int(123), my_optional_string(std::nullopt)
        {
        }

        Bar(std::string my_string, bool my_bool=true, Weekday my_day=SUNDAY, int my_int=123,
            std::optional<std::string> my_optional_string=std::nullopt) :
            Foo(my_bool, my_day), my_string(std::move(my_string)), my_int(my_int),
            my_optional_string(my_optional_string)
        {
        }

        std::string my_string;
        int my_int;
        std::optional<std::string> my_optional_string;

        MSGPACK_DEFINE(my_string, my_bool, my_day, my_int, my_optional_string);
    };

    struct Baz
    {
        Baz() : my_dc_foo(), my_foo(), my_date(), my_variant(123.0)
        {
        }

        Baz(DCFoo my_dc_foo, Foo my_foo, std::chrono::system_clock::time_point my_date,
            std::variant<std::string, double> my_variant=123.0) :
            my_dc_foo(std::move(my_dc_foo)), my_foo(std::move(my_foo)), my_date(my_date),
            my_variant(my_variant)
        {
        }

        DCFoo my_dc_foo;
        Foo my_foo;
        std::chrono::system_clock::time_point my_date;
        std::variant<std::string, double> my_variant;

        MSGPACK_DEFINE(my_dc_foo, my_foo, my_date, my_variant);
    };
} // common_object_model

#endif // COMMON_OBJECT_MODEL_FOO_H
```

common_object_model/generated/common_object_model/v1/common/foo.cpp:

```cpp
#include <pybind11/pybind11.h>
#include <pybind11/chrono.h>
#include <pybind11/stl.h>
#include "foo.h"

namespace py = pybind11;

using namespace common_object_model::v1::common;

PYBIND11_MODULE(common_object_model_v1_common_foo, m)
{
    py::enum_<Weekday>(m, "Weekday")
        .value("MONDAY", Weekday::MONDAY)
        .value("TUESDAY", Weekday::TUESDAY)
        .value("WEDNESDAY", Weekday::WEDNESDAY)
        .value("THURSDAY", Weekday::THURSDAY)
        .value("FRIDAY", Weekday::FRIDAY)
        .value("SATURDAY", Weekday::SATURDAY)
        .value("SUNDAY", Weekday::SUNDAY);

    py::class_<DCFoo>(m, "DCFoo")
        .def(py::init<>())
        .def(py::init<std::optional<std::string>, int>(), py::arg("my_string"), py::arg("my_int"))
        .def("to_msg_pack", &DCFoo::to_msg_pack)
        .def_static("from_msg_pack", &DCFoo::from_msg_pack)
        .def_readwrite("my_string", &DCFoo::my_string)
        .def_readwrite("my_int", &DCFoo::my_int);

    py::class_<Foo>(m, "Foo")
        .def(py::init<bool, Weekday>(), py::arg("my_bool")=true, py::arg("my_day")=SUNDAY)
        .def("to_msg_pack", &Foo::to_msg_pack)
        .def_static("from_msg_pack", &Foo::from_msg_pack)
        .def_readwrite("my_bool", &Foo::my_bool)
        .def_readwrite("my_day", &Foo::my_day);

    py::class_<Bar, Foo>(m, "Bar")
        .def(py::init<>())
        .def(py::init<std::string, bool, Weekday, int, std::optional<std::string>>(),
             py::arg("my_string"), py::arg("my_bool")=true, py::arg("my_day")=SUNDAY,
             py::arg("my_int")=123, py::arg("my_optional_string")=std::nullopt)
        .def("to_msg_pack", &Bar::to_msg_pack)
        .def_static("from_msg_pack", &Bar::from_msg_pack)
        .def_readwrite("my_string", &Bar::my_string)
        .def_readwrite("my_int", &Bar::my_int)
        .def_readwrite("my_optional_string", &Bar::my_optional_string);

    py::class_<Baz>(m, "Baz")
        .def(py::init<>())
        .def(py::init<DCFoo, Foo, std::chrono::system_clock::time_point, std::variant<std::string, double>>(),
             py::arg("my_dc_foo"), py::arg("my_foo"), py::arg("my_date"), py::arg("my_variant")=123.0)
        .def("to_msg_pack", &Baz::to_msg_pack)
        .def_static("from_msg_pack", &Baz::from_msg_pack)
        .def_readwrite("my_dc_foo", &Baz::my_dc_foo)
        .def_readwrite("my_foo", &Baz::my_foo)
        .def_readwrite("my_date", &Baz::my_date)
        .def_readwrite("my_variant", &Baz::my_variant);
}
```

Other languages

When time allows, I will look at adding support for Rust. There is limited value in generating Java or C# classes; calling those VM-based languages in-process from python has never worked well, in my experience.

pypi package. Binary

Latest version: 1.0.5 Released: 2023-10-27

pydantic-yaml

Pydantic-YAML

Pydantic-YAML adds YAML capabilities to Pydantic, which is an excellent Python library for data validation and settings management. If you aren't familiar with Pydantic, I would suggest you first check out their docs.

Documentation on ReadTheDocs.org

Basic Usage

```python
from enum import Enum

from pydantic import BaseModel, validator

from pydantic_yaml import parse_yaml_raw_as, to_yaml_str


class MyEnum(str, Enum):
    """A custom enumeration that is YAML-safe."""

    a = "a"
    b = "b"


class InnerModel(BaseModel):
    """A normal pydantic model that can be used as an inner class."""

    fld: float = 1.0


class MyModel(BaseModel):
    """Our custom Pydantic model."""

    x: int = 1
    e: MyEnum = MyEnum.a
    m: InnerModel = InnerModel()

    @validator("x")
    def _chk_x(cls, v: int) -> int:  # noqa
        """You can add your normal pydantic validators, like this one."""
        assert v > 0
        return v


m1 = MyModel(x=2, e="b", m=InnerModel(fld=1.5))

# This dumps to YAML and JSON respectively
yml = to_yaml_str(m1)
jsn = m1.json()

# This parses YAML as the MyModel type
m2 = parse_yaml_raw_as(MyModel, yml)
assert m1 == m2

# JSON is also valid YAML, so this works too
m3 = parse_yaml_raw_as(MyModel, jsn)
assert m1 == m3
```

With Pydantic v2, you can also dump dataclasses:

```python
from pydantic import RootModel
from pydantic.dataclasses import dataclass
from pydantic.version import VERSION as PYDANTIC_VERSION

from pydantic_yaml import to_yaml_str

assert PYDANTIC_VERSION >= "2"


@dataclass
class YourType:
    foo: str = "bar"


obj = YourType(foo="wuz")
assert to_yaml_str(RootModel[YourType](obj)) == "foo: wuz\n"
```

Configuration

Currently we use the JSON dumping of Pydantic to perform most of the magic. This uses the Config inner class, as in Pydantic:

```python
class MyModel(BaseModel):
    # ...
    class Config:
        # You can override these fields, which affect JSON and YAML:
        json_dumps = my_custom_dumper
        json_loads = lambda x: MyModel()
        # As well as other Pydantic configuration:
        allow_mutation = False
```

You can control some YAML-specific options via keyword arguments:

```python
to_yaml_str(model, indent=4)  # Makes it wider
to_yaml_str(model, map_indent=9, sequence_indent=7)  # ... you monster.
```

You can additionally pass your own YAML instance:

```python
from ruamel.yaml import YAML

my_writer = YAML(typ="safe")
my_writer.default_flow_style = True
to_yaml_file("foo.yaml", model, custom_yaml_writer=my_writer)
```

A separate configuration for YAML specifically will be added later, likely in v2.

Breaking Changes for pydantic-yaml V1

The API for pydantic-yaml version 1.0.0 has been greatly simplified!

Mixin Class

This functionality has currently been removed! YamlModel and YamlModelMixin base classes are no longer needed. The plan is to re-add it before v1 fully releases, to allow the .yaml() or .parse_*() methods. However, this will be available only for pydantic<2.

Versioned Models

This functionality has been removed, as it's questionably useful for most users. An example remains available in the docs.

pypi package. Binary | Source

Latest version: 1.4.0 Released: 2024-11-11

pydantic-form

pydantic-form A json-schema form generator using pydantic.

pypi package. Binary | Source

Latest version: 0.0.1 Released: 2022-08-22

bump-pydantic

Bump Pydantic ♻️

Bump Pydantic is a tool to help you migrate your code from Pydantic V1 to V2.

> [!NOTE]
> If you find bugs, please report them on the issue tracker.

Table of contents

Bump Pydantic ♻️
Table of contents
Installation
Usage
  Check diff before applying changes
  Apply changes
Rules
  BP001: Add default None to Optional[T], Union[T, None] and Any fields
  BP002: Replace Config class by model_config attribute
  BP003: Replace Field old parameters to new ones
  BP004: Replace imports
  BP005: Replace GenericModel by BaseModel
  BP006: Replace __root__ by RootModel
  BP007: Replace decorators
  BP008: Replace con* functions by Annotated versions
  BP009: Mark pydantic "protocol" functions in custom types with proper TODOs
License

Installation

The installation is as simple as:

```bash
pip install bump-pydantic
```

Usage

bump-pydantic is a CLI tool, hence you can use it from your terminal. It's easy to use. If your project structure is:

```bash
repository/
└── my_package/
    └── ...
```

Then you'll want to do:

```bash
cd /path/to/repository
bump-pydantic my_package
```

Check diff before applying changes

To check the diff before applying the changes, you can run:

```bash
bump-pydantic --diff
```

Apply changes

To apply the changes, you can run:

```bash
bump-pydantic
```

Rules

You can find below the list of rules that are applied by bump-pydantic. It's also possible to disable rules by using the --disable option (see the sketch at the end of this entry).

BP001: Add default None to Optional[T], Union[T, None] and Any fields

✅ Add default None to Optional[T] fields.

The following code will be transformed:

```py
class User(BaseModel):
    name: Optional[str]
```

Into:

```py
class User(BaseModel):
    name: Optional[str] = None
```

BP002: Replace Config class by model_config attribute

✅ Replace Config class by model_config = ConfigDict().
✅ Rename old Config attributes to new model_config attributes.
✅ Add a TODO comment in case the transformation can't be done automatically.
✅ Replace Extra enum by string values.

The following code will be transformed:

```py
from pydantic import BaseModel, Extra


class User(BaseModel):
    name: str

    class Config:
        extra = Extra.forbid
```

Into:

```py
from pydantic import ConfigDict, BaseModel


class User(BaseModel):
    name: str
    model_config = ConfigDict(extra="forbid")
```

BP003: Replace Field old parameters to new ones

✅ Replace Field old parameters to new ones.
✅ Replace field: Enum = Field(Enum.VALUE, const=True) by field: Literal[Enum.VALUE] = Enum.VALUE.

The following code will be transformed:

```py
from typing import List

from pydantic import BaseModel, Field


class User(BaseModel):
    name: List[str] = Field(..., min_items=1)
```

Into:

```py
from typing import List

from pydantic import BaseModel, Field


class User(BaseModel):
    name: List[str] = Field(..., min_length=1)
```

BP004: Replace imports

✅ Replace BaseSettings from pydantic to pydantic_settings.
✅ Replace Color and PaymentCardNumber from pydantic to pydantic_extra_types.

BP005: Replace GenericModel by BaseModel

✅ Replace GenericModel by BaseModel.

The following code will be transformed:

```py
from typing import Generic, TypeVar

from pydantic.generics import GenericModel

T = TypeVar('T')


class User(GenericModel, Generic[T]):
    name: str
```

Into:

```py
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar('T')


class User(BaseModel, Generic[T]):
    name: str
```

BP006: Replace __root__ by RootModel

✅ Replace __root__ by RootModel.

The following code will be transformed:

```py
from typing import List

from pydantic import BaseModel


class User(BaseModel):
    age: int
    name: str


class Users(BaseModel):
    __root__: List[User]
```

Into:

```py
from typing import List

from pydantic import RootModel, BaseModel


class User(BaseModel):
    age: int
    name: str


class Users(RootModel[List[User]]):
    pass
```

BP007: Replace decorators

✅ Replace @validator by @field_validator.
✅ Replace @root_validator by @model_validator.

The following code will be transformed:

```py
from pydantic import BaseModel, validator, root_validator


class User(BaseModel):
    name: str

    @validator('name', pre=True)
    def validate_name(cls, v):
        return v

    @root_validator(pre=True)
    def validate_root(cls, values):
        return values
```

Into:

```py
from pydantic import BaseModel, field_validator, model_validator


class User(BaseModel):
    name: str

    @field_validator('name', mode='before')
    def validate_name(cls, v):
        return v

    @model_validator(mode='before')
    def validate_root(cls, values):
        return values
```

BP008: Replace con* functions by Annotated versions

✅ Replace constr(*args) by Annotated[str, StringConstraints(*args)].
✅ Replace conint(*args) by Annotated[int, Field(*args)].
✅ Replace confloat(*args) by Annotated[float, Field(*args)].
✅ Replace conbytes(*args) by Annotated[bytes, Field(*args)].
✅ Replace condecimal(*args) by Annotated[Decimal, Field(*args)].
✅ Replace conset(T, *args) by Annotated[Set[T], Field(*args)].
✅ Replace confrozenset(T, *args) by Annotated[Set[T], Field(*args)].
✅ Replace conlist(T, *args) by Annotated[List[T], Field(*args)].

The following code will be transformed:

```py
from pydantic import BaseModel, constr


class User(BaseModel):
    name: constr(min_length=1)
```

Into:

```py
from pydantic import BaseModel, StringConstraints
from typing_extensions import Annotated


class User(BaseModel):
    name: Annotated[str, StringConstraints(min_length=1)]
```

BP009: Mark Pydantic "protocol" functions in custom types with proper TODOs

✅ Mark __get_validators__ as to be replaced by __get_pydantic_core_schema__.
✅ Mark __modify_schema__ as to be replaced by __get_pydantic_json_schema__.

The following code will be transformed:

```py
class SomeThing:
    @classmethod
    def __get_validators__(cls):
        yield from []

    @classmethod
    def __modify_schema__(cls, field_schema, field):
        if field:
            field_schema['example'] = "Weird example"
```

Into:

```py
class SomeThing:
    @classmethod
    # TODO[pydantic]: We couldn't refactor `__get_validators__`, please create the `__get_pydantic_core_schema__` manually.
    # Check https://docs.pydantic.dev/latest/migration/#defining-custom-types for more information.
    def __get_validators__(cls):
        yield from []

    @classmethod
    # TODO[pydantic]: We couldn't refactor `__modify_schema__`, please create the `__get_pydantic_json_schema__` manually.
    # Check https://docs.pydantic.dev/latest/migration/#defining-custom-types for more information.
    def __modify_schema__(cls, field_schema, field):
        if field:
            field_schema['example'] = "Weird example"
```

License

This project is licensed under the terms of the MIT license.
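The --disable option referenced in the Rules section can be combined with the usual invocation. A hedged sketch (rule IDs as listed above; the exact flag syntax is an assumption, not taken from the README):

```bash
# Run the migration over my_package, skipping the BP001 and BP007 rules.
bump-pydantic --disable BP001 BP007 my_package
```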

pypi package. Binary | Source

Latest version: 0.8.0 Released: 2023-12-28

pydantic-view

Pydantic view helper decorator

Installation

```bash
pip install pydantic_view
```

Usage

```python
In [1]: from pydantic import BaseModel, Field
   ...: from pydantic_view import view
   ...:
   ...:
   ...: class User(BaseModel):
   ...:     id: int
   ...:     username: str
   ...:     password: str
   ...:     address: str
   ...:
   ...:
   ...: @view("Create", exclude={"id"})
   ...: class UserCreate(User):
   ...:     pass
   ...:
   ...:
   ...: @view("Update")
   ...: class UserUpdate(User):
   ...:     pass
   ...:
   ...:
   ...: @view("Patch")
   ...: class UserPatch(User):
   ...:     username: str = None
   ...:     password: str = None
   ...:     address: str = None
   ...:
   ...:
   ...: @view("Out", exclude={"password"})
   ...: class UserOut(User):
   ...:     pass

In [2]: user = User(id=0, username="human", password="iamaman", address="Earth")
   ...: user.Out()
Out[2]: UserOut(id=0, username='human', address='Earth')

In [3]: User.Update(id=0, username="human", password="iamasuperman", address="Earth")
Out[3]: UserUpdate(id=0, username='human', password='iamasuperman', address='Earth')

In [4]: User.Patch(id=0, address="Mars")
Out[4]: UserPatch(id=0, username=None, password=None, address='Mars')
```

FastAPI example

```python
from typing import Optional

from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel, ConfigDict, Field

from pydantic_view import view, view_field_validator


class UserSettings(BaseModel):
    model_config = ConfigDict(extra="forbid")

    public: Optional[str] = None
    secret: Optional[str] = None


@view("Out", exclude={"secret"})
class UserSettingsOut(UserSettings):
    pass


@view("Create")
class UserSettingsCreate(UserSettings):
    pass


@view("Update")
class UserSettingsUpdate(UserSettings):
    pass


@view("Patch")
class UserSettingsPatch(UserSettings):
    public: str = None
    secret: str = None


class User(BaseModel):
    model_config = ConfigDict(extra="forbid")

    id: int
    username: str
    password: str = Field(default_factory=lambda: "password")
    settings: UserSettings

    @view_field_validator({"Create", "Update", "Patch"}, "username")
    @classmethod
    def validate_username(cls, v):
        if len(v) < 3:
            raise ValueError
        return v


@view("Out", exclude={"password"})
class UserOut(User):
    pass


@view("Create", exclude={"id"})
class UserCreate(User):
    settings: UserSettings = Field(default_factory=UserSettings)


@view("Update", exclude={"id"})
class UserUpdate(User):
    pass


@view("Patch", exclude={"id"})
class UserPatch(User):
    username: str = None
    password: str = None
    settings: UserSettings = None


app = FastAPI()

db = {}


@app.get("/users/{user_id}", response_model=User.Out)
async def get(user_id: int) -> User.Out:
    return db[user_id]


@app.post("/users", response_model=User.Out)
async def post(user: User.Create) -> User.Out:
    user_id = 0  # generate_user_id()
    db[0] = User(id=user_id, **user.model_dump())
    return db[0]


@app.put("/users/{user_id}", response_model=User.Out)
async def put(user_id: int, user: User.Update) -> User.Out:
    db[user_id] = User(id=user_id, **user.model_dump())
    return db[user_id]


@app.patch("/users/{user_id}", response_model=User.Out)
async def patch(user_id: int, user: User.Patch) -> User.Out:
    db[user_id] = User(**{**db[user_id].model_dump(), **user.model_dump(exclude_unset=True)})
    return db[user_id]


def test_fastapi():
    client = TestClient(app)

    # POST
    response = client.post(
        "/users",
        json={
            "username": "admin",
            "password": "admin",
        },
    )
    assert response.status_code == 200, response.text
    assert response.json() == {
        "id": 0,
        "username": "admin",
        "settings": {"public": None},
    }

    # GET
    response = client.get("/users/0")
    assert response.status_code == 200, response.text
    assert response.json() == {
        "id": 0,
        "username": "admin",
        "settings": {"public": None},
    }

    # PUT
    response = client.put(
        "/users/0",
        json={
            "username": "superadmin",
            "password": "superadmin",
            "settings": {"public": "foo", "secret": "secret"},
        },
    )
    assert response.status_code == 200, response.text
    assert response.json() == {
        "id": 0,
        "username": "superadmin",
        "settings": {"public": "foo"},
    }

    # PATCH
    response = client.patch(
        "/users/0",
        json={
            "username": "guest",
            "settings": {"public": "bar"},
        },
    )
    assert response.status_code == 200, response.text
    assert response.json() == {
        "id": 0,
        "username": "guest",
        "settings": {"public": "bar"},
    }
```

pypi package. Binary

Latest version: 2.0.1 Released: 2024-10-14