Package maintenance

pydantic-dict

pydantic_dict A pydantic model subclass that implements Python's dictionary interface. Example: ```python from pydantic_dict import BaseModelDict class User(BaseModelDict): id: int name: str = "Jane Doe" user = User(id=42) user["session_id"] = "95607c42-250a-4913-9dfb-00eb6535e685" assert user.session_id == "95607c42-250a-4913-9dfb-00eb6535e685" assert user["session_id"] == "95607c42-250a-4913-9dfb-00eb6535e685" user.pop("session_id") assert "session_id" not in user assert user.get("last_name", None) is None user.update({"email": "jane.doe@email.com"}) print(user.json()) >>> {"id": 42, "name": "Jane Doe", "email": "jane.doe@email.com"} user.clear() # fields are NOT removed. only non-fields are removed print(user.json()) >>> {"id": 42, "name": "Jane Doe"} user.setdefault("last_active", "2023-01-01T19:56:10Z") del user["last_active"] ``` Unset marker type The Unset marker type provides a way to "mark" that an optional model field is by default not set and is not required to construct the model. This enables more semantic usage of built-in dict methods like get() and setdefault() that can return or set a default value. Likewise, fields that are Unset are not considered to be members of a BaseModelDict dictionary (e.g. "unset_field" not in model_dict) and are not included in __iter__(), keys(), values(), or len(model_dict). This feature is especially useful when refactoring existing code to use pydantic. Example: ```python from pydantic_dict import BaseModelDict, Unset from typing import Optional class User(BaseModelDict): id: int name: str = "Jane Doe" email: Optional[str] = Unset user = User(id=42) assert "email" not in user user["email"] # raises KeyError assert len(user) == 2 assert set(user.keys()) == {"id", "name"} user.setdefault("email", f"{user.id}@service.com") # returns 42@service.com assert "email" in user ``` Install shell pip install pydantic_dict

pypi package. Binary

Latest version: 0.0.3 Released: 2023-06-30

pydantic-view

Pydantic view helper decorator Installation bash pip install pydantic_view Usage ```python In [1]: from pydantic import BaseModel, Field ...: from pydantic_view import view ...: ...: ...: class User(BaseModel): ...: id: int ...: username: str ...: password: str ...: address: str ...: ...: @view("Create", exclude={"id"}) ...: class UserCreate(User): ...: pass ...: ...: @view("Update") ...: class UserUpdate(User): ...: pass ...: ...: @view("Patch") ...: class UserPatch(User): ...: username: str = None ...: password: str = None ...: address: str = None ...: ...: @view("Out", exclude={"password"}) ...: class UserOut(User): ...: pass In [2]: user = User(id=0, username="human", password="iamaman", address="Earth") ...: user.Out() ...: Out[2]: UserOut(id=0, username='human', address='Earth') In [3]: User.Update(id=0, username="human", password="iamasuperman", address="Earth") ...: Out[3]: UserUpdate(id=0, username='human', password='iamasuperman', address='Earth') In [4]: User.Patch(id=0, address="Mars") ...: Out[4]: UserPatch(id=0, username=None, password=None, address='Mars') ``` FastAPI example ```python from typing import Optional from fastapi import FastAPI from fastapi.testclient import TestClient from pydantic import BaseModel, ConfigDict, Field from pydantic_view import view, view_field_validator class UserSettings(BaseModel): model_config = ConfigDict(extra="forbid") public: Optional[str] = None secret: Optional[str] = None @view("Out", exclude={"secret"}) class UserSettingsOut(UserSettings): pass @view("Create") class UserSettingsCreate(UserSettings): pass @view("Update") class UserSettingsUpdate(UserSettings): pass @view("Patch") class UserSettingsPatch(UserSettings): public: str = None secret: str = None class User(BaseModel): model_config = ConfigDict(extra="forbid") id: int username: str password: str = Field(default_factory=lambda: "password") settings: UserSettings @view_field_validator({"Create", "Update", "Patch"}, "username") @classmethod def 
validate_username(cls, v): if len(v) < 3: raise ValueError return v @view("Out", exclude={"password"}) class UserOut(User): pass @view("Create", exclude={"id"}) class UserCreate(User): settings: UserSettings = Field(default_factory=UserSettings) @view("Update", exclude={"id"}) class UserUpdate(User): pass @view("Patch", exclude={"id"}) class UserPatch(User): username: str = None password: str = None settings: UserSettings = None app = FastAPI() db = {} @app.get("/users/{user_id}", response_model=User.Out) async def get(user_id: int) -> User.Out: return db[user_id] @app.post("/users", response_model=User.Out) async def post(user: User.Create) -> User.Out: user_id = 0 # generate_user_id() db[0] = User(id=user_id, **user.model_dump()) return db[0] @app.put("/users/{user_id}", response_model=User.Out) async def put(user_id: int, user: User.Update) -> User.Out: db[user_id] = User(id=user_id, **user.model_dump()) return db[user_id] @app.patch("/users/{user_id}", response_model=User.Out) async def patch(user_id: int, user: User.Patch) -> User.Out: db[user_id] = User({db[user_id].model_dump(), **user.model_dump(exclude_unset=True)}) return db[user_id] def test_fastapi(): client = TestClient(app) # POST response = client.post( "/users", json={ "username": "admin", "password": "admin", }, ) assert response.status_code == 200, response.text assert response.json() == { "id": 0, "username": "admin", "settings": {"public": None}, } # GET response = client.get("/users/0") assert response.status_code == 200, response.text assert response.json() == { "id": 0, "username": "admin", "settings": {"public": None}, } # PUT response = client.put( "/users/0", json={ "username": "superadmin", "password": "superadmin", "settings": {"public": "foo", "secret": "secret"}, }, ) assert response.status_code == 200, response.text assert response.json() == { "id": 0, "username": "superadmin", "settings": {"public": "foo"}, } # PATCH response = client.patch( "/users/0", json={ "username": "guest", 
"settings": {"public": "bar"}, }, ) assert response.status_code == 200, response.text assert response.json() == { "id": 0, "username": "guest", "settings": {"public": "bar"}, } ```

pypi package. Binary

Latest version: 2.0.1 Released: 2024-10-14

BIDS-pydantic

BIDS-pydantic Overview BIDS-pydantic will pull a specified version (from v1.7.0 onwards, tested up to v1.7.0) of the BIDS metadata schema which is used in the JSON BIDS sidecar, from the official BIDS GitHub page, and create corresponding pydantic models, which will provide BIDS data validation using python type annotations. Alternatively, the BIDS-pydantic-models package will only install the models for direct use in your Python software. More information on the use of the models can be found here. Table of Contents Quickstart Installation Usage Development Acknowledgements License How To Contribute Got a great idea for something to implement in BIDS-pydantic, or maybe you have just found a bug? Create an issue to get in touch with the development team and we’ll take it from there. Quickstart If you just want to use the models in your project. Download the pydantic models file for the BIDS schema version you wish to use from the models directory, and add it to your code-base. These files are generated using the bids-pydantic make -a command (see below). Alternatively, you can just run: sh $ pip install bids-pydantic-models More information on the use of the models can be found here. If you want to use the command line tool to generate the models, keep reading this README. Installation Install with: sh $ pip install bids-pydantic BIDS-pydantic can be installed as a module directly from the python package index. For more information how to use a python package in this way please see https://docs.python.org/3/installing/index.html Python Version We recommend using the latest version of Python. BIDS-pydantic supports Python 3.9 and newer. Dependencies These distributions will be installed automatically when installing BIDS-pydantic. pydantic datamodel-code-generator Usage The primary commands can be viewed with the command bids-pydantic: ``` usage: bids-pydantic [-h] {list,make} ... Run one of a set of commands. For example: bids-pydantic list, or bids-pydantic make. 
Run either command with -h e.g. bids-pydantic make -h to get help for that command. optional arguments: -h, --help show this help message and exit command: {list,make} subcommand to run ``` The list command help can be viewed with the command bids-pydantic list -h: ``` usage: bids-pydantic list [-h] Queries the GitHub API and lists the available supported BIDS schema versions. Only tested up to v1.7.0. optional arguments: -h, --help show this help message and exit ``` The make command help can be viewed with the command bids-pydantic make -h: ``` usage: bids-pydantic make [-h] [--output OUTPUT] [--output-all OUTPUT_ALL] [--schema-version [SCHEMA_VERSION]] Make a new python file(s) containing BIDS compliant pydantic models optional arguments: -h, --help show this help message and exit --output OUTPUT, -o OUTPUT The output python filename to create (will output to stdout console if not specified). --output-all OUTPUT_ALL, -a OUTPUT_ALL Find all parsable schemas and output each to the provided directory. Will create filenames such as bids_schema_model_v_1_7_0.py, etc. Will overwrite any files in that directory with the same name. --schema-version [SCHEMA_VERSION] The BIDS schema version to use. e.g. 1.7.0 - supported versions can be discovered using the list command. If a version is not specified v1.7.0 will be used. --input INPUT, -i INPUT Specify an input BIDS metadata (yml) file to use instead of downloading a version from GitHub. Cannot be used with --schema-version or --output-all ``` Development Development dependencies should be installed using pip install -r requirements/dev.txt -U, and pre-commit install then run to install code-quality Git hooks. Development should be carried out using Python 3.8. Development must comply with a few code styling/quality rules and processes: Before pushing any code, make sure the CHANGELOG.md is updated as per the instructions in the CHANGELOG.md file. tox should also be run to ensure that tests and code-quality checks pass. 
The README.md file should be updated with any usage or development instructions. Ensure that a good level of test coverage is kept. The test reports will be committed to the CI system when testing is run, and these will be made available during code review. If you wish to view test coverage locally, run coverage report. To ensure these code quality rules are kept to, pre-commit should be installed (see the requirements/dev.txt), and pre-commit install run when first cloning this repo. This will install some pre-commit hooks that will ensure any committed code meets the minimum code-quality and is formatted correctly before being committed to Git. This will ensure that tests will pass on the CI system after code is pushed. The tools should also be included in any IDEs/editors used, where possible. To run manually, run pre-commit run --all-files. The following software tools will be run: mypy pylint black isort pyupgrade Acknowledgements Conversion from schema to pydantic models is carried out using datamodel-code-generator. Data validation is performed using pydantic. License You can check out the full license here This project is licensed under the terms of the MIT license.

pypi package. Binary | Source

Latest version: 0.0.3 Released: 2023-06-08

pydantic-avro

pydantic-avro This library can convert a pydantic class to an Avro schema or generate python code from an Avro schema. Install bash pip install pydantic-avro Pydantic class to avro schema ```python import json from typing import Optional from pydantic_avro.base import AvroBase class TestModel(AvroBase): key1: str key2: int key3: Optional[str] schema_dict: dict = TestModel.avro_schema() print(json.dumps(schema_dict)) ``` Avro schema to pydantic ```shell Print to stdout pydantic-avro avro_to_pydantic --asvc /path/to/schema.asvc Save it to a file pydantic-avro avro_to_pydantic --asvc /path/to/schema.asvc --output /path/to/output.py ``` Specify expected Avro type ```python from datetime import datetime from pydantic import Field from pydantic_avro.base import AvroBase class ExampleModel(AvroBase): field1: int = Field(..., avro_type="long") # Explicitly set Avro type to "long" field2: datetime = Field(..., avro_type="timestamp-millis") # Explicitly set Avro type to "timestamp-millis" ``` Install for developers Install package Requirement: Poetry 1.* shell poetry install Run unit tests ```shell pytest coverage run -m pytest # with coverage or (depends on your local env) poetry run pytest poetry run coverage run -m pytest # with coverage ``` Run linting The linting is checked in the github workflow. To fix and review issues run this: shell black . # Auto fix all issues isort . # Auto fix all issues pflake8 . # Only display issues, fixing is manual

pypi package. Binary | Source

Latest version: 0.9.0 Released: 2025-04-09

pydantic-xlsx

pydantic-xlsx This Python library tries to ease parsing and dumping data from and to Excel's xlsx (also known as Office Open XML Workbook) with the help of pydantic models. It uses openpyxl to interact with the Excel files. As with pydantic you define the structure of your in- or output xlsx file as a pydantic model. As with pydantic you define the structure of your data as Models with the help of Python's typing system. You can find the API documentation here. State of the project Alpha state. This package started as a module within another application. I'm currently extracting pydantic-xlsx from this project. So expect some rough edges and missing documentation. Motivation and Overview First of all: If there is another way to accomplish your goal without using spreadsheet software or data formats do it. Spreadsheets have many drawbacks in contrast to »real« databases and programming. Consider using Jupyter if you need some sort of interaction with your data. This package was written for circumstances where you're forced to work with spreadsheet files. The goal of pydantic-xlsx is to make the interaction with such data sources/environments as comfortable as possible while enforcing as much data validation as possible. Another more or less legit use case for this library is the ability to get a quick overview over your data for debugging.
To get a first glimpse consider the following example: ```python from enum import Enum from typing import List from pydantic_xlsx import XlsxField, XlsxModel from pydantic_xlsx.money import Euro class Function(str, Enum): boss = "Boss" worker = "Worker" class Employee(XlsxModel): name: str = XlsxField("", alias="Full Name") age: int wage: Euro function: Function class Config: use_enum_values = True allow_population_by_field_name = True class Company(XlsxModel): staff: List[Employee] class Config: freeze_cell = "A2" my_company = Company(staff=[ Employee(name="Morio Rossi", age=42, wage=4200, function=Function.boss), Employee(name="John Doe", age=23, wage=2300, function=Function.worker) ]) my_company.to_file("my-company.xlsx") ``` Results in the following file: You can parse the file using the from_file method. python loaded_company = Company.from_file("my-company.xlsx") print(loaded_company) A word on the Config sub-class: Inside the Employee model the Config sub-class sets two fairly common pydantic options when working with Excel files. use_enum_values represents enums as their values and not as a classpath without this option »Boss« would be represented as function.boss and »Worker« as function.worker. Using the enum value makes the spreadsheet more user-friendly. By setting allow_population_by_field_name to True you can define alternative column names by setting the alias property of a field. Features In-/Exporting structured data from/to Excel while benefiting from Pydantic's mature features like data validation. The correct Excel number-format is applied according to the field's data type. It's also possible to customize the formatting for a specific field. Define document wide fonts as well as alter the font for columns. Enums columns provides the spreadsheet user with a drop-down menu showing all allowed values. The data is referenced as a Worksheet Table in the Xlsx document. 
Providing more information on the structure of the data and fast sorting per column. Internal money field type which generates the correct Excel number-format. Some currencies (like Euro or US Dollar) are already defined others can be easily created. The format for printing can be controlled within pydantic_xlsx. Mapping/Composition Unlike data formats supported by pydantic (like JSON or YAML) spreadsheets do not have a straightforward way of arbitrarily nesting data structures. This quickly becomes a problem if your model describes some lists of lists or alike. Unaddressed, this will lead to a wide range of undefined behavior when translating a pydantic model to a spreadsheet. To circumvent this pydantic-xlsx only allows a defined range of nested data structures to be a valid input. This section gives an overview about these types and their mapping into spreadsheets (this process is internally known as »composition«). Single Model This handles all models which do not contain any models as property type. The resulting spreadsheet will contain one sheet with a single row of data. ```python class Employee(XlsxModel): name: str = XlsxField(alias="Full Name") age: int employee = Employee(name="Morio Rossi", age=42) employee.to_file("employee.xlsx") ``` Will result in the following file: As you see the Excel sheet is named after your Model Class (Employee) which contains a single row of data. Single Model Mapping can only represent a single entry of data thus is not very helpful in most cases. Root collection Collection Types Todo. Field options You can alter the appearance and behavior of columns by using XlsxField. The available options are defined in the XlsxFieldInfo Class. Font (font) Alter the font of a specific field. The fonts are defined using the openpyxl Font object (see above for an example). Number Format (number_format) Optional Excel number format code to alter the display of numbers, dates and so on. 
Please refer to the Excel documentation to learn more about the syntax. Document options The library tries to output spreadsheets with some reasonable styles and tweaks. By defining the inner Config class in your model, you can control this behavior and the appearance of the output document. For more information you can consult the documentation on the XlsxConfig class. Header font (header_font) The library expects the first row of every sheet to contain the names of the fields. Use this option to alter the appearance of this row by defining your own openpyxl Font (learn more about styling with openpyxl here). The field defaults to openpyxl.styles.Font(name="Arial", bold=True). Font (font) Optional openpyxl Font (learn more about styling with openpyxl here) for all of your spreadsheet except the title row. Defaults to None. Freeze Cells (freeze_cell) Freezing cells makes them visible while scrolling through your document (learn more in the Excel documentation). This is especially useful for pinning the header row. This is also what pydantic-xlsx is doing by default (freeze cell at A2) Todo: The rest Known pitfalls Massive amount of empty cells when loading a Spreadsheet with data validation (Generics) Cause: pydantic-xlsx uses a method of openpyxl to determine the dimension of the data area (aka the part of the spreadsheet actually containing some data). A cell is treated as non-empty (thus expanding the size of the imported area) from the point some properties were set for this cell. Defining a valid data range is one of them. If a user accidentally defines a valid data range for the whole column you end up with pydantic-xlsx trying to import and validate thousands of seemingly empty rows. Solution. This is exactly why you shouldn't use spreadsheets in the first place. The only solution is to manually delete all formatting etc. from all worksheets. Or just copy the relevant data into a new spreadsheet (including the header).

pypi package. Binary

Latest version: 0.1.0 Released: 2025-01-25

pydantic-scim

pydantic-scim Largely generated by running datamodel-codegen on the files in schema/, and then cleaned up by hand.

pypi package. Binary

Latest version: 0.0.8 Released: 2023-10-26

pydantic-zarr

pydantic-zarr Pydantic models for Zarr. ⚠️ Disclaimer ⚠️ This project is under a lot of flux -- I want to add zarr version 3 support to this project, but the reference python implementation doesn't support version 3 yet. Also, the key ideas in this repo may change in the process of being formalized over in this specification (currently just a draft). As the ecosystem evolves I will be breaking things (and versioning the project accordingly), so be advised! Installation pip install -U pydantic-zarr Help See the documentation for detailed information about this project. Example ```python import zarr from pydantic_zarr import GroupSpec group = zarr.group(path='foo') array = zarr.create(store = group.store, path='foo/bar', shape=10, dtype='uint8') array.attrs.put({'metadata': 'hello'}) this is a pydantic model spec = GroupSpec.from_zarr(group) print(spec.model_dump()) """ { 'zarr_version': 2, 'attributes': {}, 'members': { 'bar': { 'zarr_version': 2, 'attributes': {'metadata': 'hello'}, 'shape': (10,), 'chunks': (10,), 'dtype': '|u1', 'fill_value': 0, 'order': 'C', 'filters': None, 'dimension_separator': '.', 'compressor': { 'id': 'blosc', 'cname': 'lz4', 'clevel': 5, 'shuffle': 1, 'blocksize': 0, }, } }, } """ ```

pypi package. Binary

Latest version: 0.7.0 Released: 2024-03-20

pydantic-form

pydantic-form json-schema Form generator using pydantic

pypi package. Binary | Source

Latest version: 0.0.1 Released: 2022-08-22

pydantic-marc

pydantic-marc pydantic-marc is a library for validating data against the MARC21 Format for Bibliographic Data. Installation Use pip: $ pip install pydantic-marc Features pydantic-marc uses pydantic, the popular data validation library, to define the valid components of a MARC record. The package expects users will employ pymarc to read MARC records from binary files. Basic usage: Validating a MARC record: ```python from pymarc import MARCReader from rich import print from pydantic_marc import MarcRecord with open("temp/valid.mrc", "rb") as fh: reader = MARCReader(fh) for record in reader: print(record) model = MarcRecord.model_validate(record, from_attributes=True) print(model.model_dump()) json { "leader": "00536nam a22001985i 4500", "fields": [ {"001": "123456789"}, {"008": "201201s2020 nyua 000 1 eng d"}, {"035": {"ind1": " ", "ind2": " ", "subfields": [{"a": "(OCoLC)1234567890"}]}}, {"049": {"ind1": " ", "ind2": " ", "subfields": [{"a": "NYPP"}]}}, { "245": { "ind1": "0", "ind2": "0", "subfields": [ {"a": "Fake :"}, {"b": "Marc Record"}, ] } }, { "264": { "ind1": " ", "ind2": "1", "subfields": [ {"a": "New York :"}, {"b": "NY,"}, {"c": "[2020]"} ] } }, { "300": { "ind1": " ", "ind2": " ", "subfields": [{"a": "100 pages :"}, {"b": "color illustrations;"}, {"c": "30 cm"}] } }, {"336": {"ind1": " ", "ind2": " ", "subfields": [{"a": "text"}, {"b": "txt"}, {"2": "rdacontent"}]}}, {"337": {"ind1": " ", "ind2": " ", "subfields": [{"a": "unmediated"}, {"b": "n"}, {"2": "rdamedia"}]}}, {"338": {"ind1": " ", "ind2": " ", "subfields": [{"a": "volume"}, {"b": "nc"}, {"2": "rdacarrier"}]}} ] } ``` If the record is invalid the errors can be returned as json, a dictionary, or in a human-readable format. 
JSON Error Message: ```python from pydantic import ValidationError from pymarc import MARCReader from pydantic_marc import MarcRecord with open("temp/invalid.mrc", "rb") as fh: reader = MARCReader(fh) for record in reader: print(record) try: MarcRecord.model_validate(record) except ValidationError as e: # errors as a dictionary print(e.errors()) # errors as json print(e.json()) json [ { "type": "non_repeatable_field", "loc": ("fields", "001"), "msg": "001: Has been marked as a non-repeating field.", "input": "001", "ctx": {"input": "001"} }, { "type": "missing_required_field", "loc": ("fields", "245"), "msg": "One 245 field must be present in a MARC21 record.", "input": "245", "ctx": {"input": "245"} }, { "type": "multiple_1xx_fields", "loc": ("fields", "100", "110"), "msg": "1XX: Only one 1XX tag is allowed. Record contains: ['100', '110']", "input": ["100", "110"], "ctx": {"input": ["100", "110"]} }, { "type": "control_field_length_invalid", "loc": ("fields", "006"), "msg": "006: Length appears to be invalid. Reported length is: 6. Expected length is: 18", "input": "p |", "ctx": {"tag": "006", "valid": 18, "input": "p |", "length": 6} }, { "type": "invalid_indicator", "loc": ("fields", "035", "ind1"), "msg": "035 ind1: Invalid data (0). 
Indicator should be ['', ' '].", "input": "0", "ctx": {"loc": ("035", "ind1"), "input": "0", "valid": ["", " "], "tag": "035", "ind": "ind1"} }, { "type": "non_repeatable_subfield", "loc": ("fields", "600", "a"), "msg": "600 $a: Subfield cannot repeat.", "input": [PydanticSubfield(code="a", value="Foo"), PydanticSubfield(code="a", value="Foo,")], "ctx": { "loc": ("600", "a"), "input": [PydanticSubfield(code="a", value="Foo"), PydanticSubfield(code="a", value="Foo,")], "tag": "600", "code": "a" } } ] Human-readable Error Message:python from pydantic import ValidationError from pymarc import MARCReader from pydantic_marc import MarcRecord with open("temp/invalid.mrc", "rb") as fh: reader = MARCReader(fh) for record in reader: print(record) try: MarcRecord.model_validate(record) except ValidationError as e: # errors in a human-readable format print(e.errors()) text 6 validation errors for MarcRecord fields.001 001: Has been marked as a non-repeating field. [type=non_repeatable_field, input_value='001', input_type=str] fields.245 One 245 field must be present in a MARC21 record. [type=missing_required_field, input_value='245', input_type=str] fields.100.110 1XX: Only one 1XX tag is allowed. Record contains: ['100', '110'] [type=multiple_1xx_fields, input_value=['100', '110'], input_type=list] fields.006 006: Length appears to be invalid. Reported length is: 6. Expected length is: 18 [type=control_field_length_invalid, input_value='p |', input_type=str] fields.035.ind1 035 ind1: Invalid data (0). Indicator should be ['', ' ']. [type=invalid_indicator, input_value='0', input_type=str] fields.600.a 600 $a: Subfield cannot repeat. [type=non_repeatable_subfield, input_value=[PydanticSubfield(code='a...code='a', value='Foo,')], input_type=list] ```

pypi package. Binary

Latest version: 0.1.0 Released: 2025-03-09

pydantic-meta

pypi package. Binary

Latest version: 0.3.3 Released: 2023-04-03