Skip to content
Commits on Source (2)
......@@ -11,6 +11,10 @@ setuptools = "*"
wheel = "*"
twine = "*"
pipenv = "*"
sphinx = "*"
sphinx-rtd-theme = "*"
recommonmark = "*"
"m2r" = "*"
[requires]
python_version = "3.6"
{
"_meta": {
"hash": {
"sha256": "1291e48f2e23669abee408fb38ac26298cb5abf8badde09d57ef74b5bb12d503"
"sha256": "ef6a7a5718470df7ea704c5913bb81885fbf26cb4c7e8b775d8e657288b50a3e"
},
"pipfile-spec": 6,
"requires": {
......@@ -26,6 +26,20 @@
}
},
"develop": {
"alabaster": {
"hashes": [
"sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
],
"version": "==0.7.12"
},
"babel": {
"hashes": [
"sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669",
"sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23"
],
"version": "==2.6.0"
},
"bleach": {
"hashes": [
"sha256:48d39675b80a75f6d1c3bdbffec791cf0bbbab665cf01e20da701c77de278718",
......@@ -47,6 +61,12 @@
],
"version": "==3.0.4"
},
"commonmark": {
"hashes": [
"sha256:34d73ec8085923c023930dfc0bcd1c4286e28a2a82de094bb72fabcc0281cbe5"
],
"version": "==0.5.4"
},
"docutils": {
"hashes": [
"sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6",
......@@ -62,6 +82,74 @@
],
"version": "==2.7"
},
"imagesize": {
"hashes": [
"sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8",
"sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"
],
"version": "==1.1.0"
},
"jinja2": {
"hashes": [
"sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
],
"version": "==2.10"
},
"m2r": {
"hashes": [
"sha256:bf90bad66cda1164b17e5ba4a037806d2443f2a4d5ddc9f6a5554a0322aaed99"
],
"index": "pypi",
"version": "==0.2.1"
},
"markupsafe": {
"hashes": [
"sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432",
"sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b",
"sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9",
"sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af",
"sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834",
"sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd",
"sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d",
"sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7",
"sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b",
"sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3",
"sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c",
"sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2",
"sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7",
"sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36",
"sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1",
"sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e",
"sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1",
"sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c",
"sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856",
"sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550",
"sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492",
"sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672",
"sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401",
"sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6",
"sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6",
"sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c",
"sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd",
"sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"
],
"version": "==1.1.0"
},
"mistune": {
"hashes": [
"sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e",
"sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"
],
"version": "==0.8.4"
},
"packaging": {
"hashes": [
"sha256:0886227f54515e592aaa2e5a553332c73962917f2831f1b0f9b9f4380a4b9807",
"sha256:f95a1e147590f204328170981833854229bb2912ac3d5f89e2a8ccd2834800c9"
],
"version": "==18.0"
},
"pipenv": {
"hashes": [
"sha256:a785235bf2ddf65ea8a91531b3372471d9ad86036335dba8bd63f20c00a68e63",
......@@ -85,6 +173,20 @@
],
"version": "==2.2.0"
},
"pyparsing": {
"hashes": [
"sha256:40856e74d4987de5d01761a22d1621ae1c7f8774585acae358aa5c5936c6c90b",
"sha256:f353aab21fd474459d97b709e527b5571314ee5f067441dc9f88e33eecd96592"
],
"version": "==2.3.0"
},
"pytz": {
"hashes": [
"sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca",
"sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6"
],
"version": "==2018.7"
},
"readme-renderer": {
"hashes": [
"sha256:bb16f55b259f27f75f640acf5e00cf897845a8b3e4731b5c1a436e4b8529202f",
......@@ -92,12 +194,20 @@
],
"version": "==24.0"
},
"recommonmark": {
"hashes": [
"sha256:6e29c723abcf5533842376d87c4589e62923ecb6002a8e059eb608345ddaff9d",
"sha256:cd8bf902e469dae94d00367a8197fb7b81fcabc9cfb79d520e0d22d0fbeaa8b7"
],
"index": "pypi",
"version": "==0.4.0"
},
"requests": {
"hashes": [
"sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c",
"sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279"
"sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54",
"sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263"
],
"version": "==2.20.0"
"version": "==2.20.1"
},
"requests-toolbelt": {
"hashes": [
......@@ -113,6 +223,36 @@
],
"version": "==1.11.0"
},
"snowballstemmer": {
"hashes": [
"sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128",
"sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89"
],
"version": "==1.2.1"
},
"sphinx": {
"hashes": [
"sha256:120732cbddb1b2364471c3d9f8bfd4b0c5b550862f99a65736c77f970b142aea",
"sha256:b348790776490894e0424101af9c8413f2a86831524bd55c5f379d3e3e12ca64"
],
"index": "pypi",
"version": "==1.8.2"
},
"sphinx-rtd-theme": {
"hashes": [
"sha256:02f02a676d6baabb758a20c7a479d58648e0f64f13e07d1b388e9bb2afe86a09",
"sha256:d0f6bc70f98961145c5b0e26a992829363a197321ba571b31b24ea91879e0c96"
],
"index": "pypi",
"version": "==0.4.2"
},
"sphinxcontrib-websupport": {
"hashes": [
"sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd",
"sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9"
],
"version": "==1.1.0"
},
"tqdm": {
"hashes": [
"sha256:3c4d4a5a41ef162dd61f1edb86b0e1c7859054ab656b2e7c7b77e7fbf6d9f392",
......@@ -130,10 +270,10 @@
},
"urllib3": {
"hashes": [
"sha256:41c3db2fc01e5b907288010dec72f9d0a74e37d6994e6eb56849f59fea2265ae",
"sha256:8819bba37a02d143296a4d032373c4dd4aca11f6d4c9973335ca75f9c8475f59"
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
],
"version": "==1.24"
"version": "==1.24.1"
},
"virtualenv": {
"hashes": [
......
# Dictionary Deserializer
# Dictionary deserializer
Dictionary deserializer is a project built to convert dictionaries into
composite classes in an intuitive way. Special attention was also paid
to being friendly to static type-checkers and IDE autocompletes.
It is expected that this library is used together with a JSON-to-dict
deserializer like `json.loads`.
## Design
This project was originally meant as a proof of concept, to be used to find
other projects that would be able to replace this, with the required feature
set. That project was not found, and therefore, this project was expanded.
### Requirements
* Use type hints for type validation
* Allow polymorphism
* Through `typing.Union`s
* Through subclassing
* Support a large part of the `typing` module's types
* Allow validations on values
* Be able to validate and deserialize _any_ compliant JSON structure
* Be compatible with static type checkers and IDE hinting
* Have a small impact on existing code starting to use this library
## Examples
None of this code is useful unless you know how to use it — and it is very
simple to use. Here are some examples:
### Specifying a structure
```python
from typing import Optional
from dict_deserializer.deserializer import Deserializable
class User(Deserializable):
email: str # Type must be a string
username: str # Type must be a string
password: Optional[str] # Type must either be a string or a None
```
### Deserialization
```python
from dict_deserializer.deserializer import deserialize, Rule
# Successful
deserialize(Rule(User), {
'email': 'pypi@rolfvankleef.nl',
'username': 'rkleef',
})
# Fails because optional type is wrong
deserialize(Rule(User), {
'email': 'pypi@rolfvankleef.nl',
'username': 'rkleef',
'password': 9.78,
})
```
### Polymorphic structures
```python
from typing import Optional, Any, List
from dict_deserializer.deserializer import Deserializable
from dict_deserializer.annotations import abstract
@abstract
class DirectoryObject(Deserializable):
name: str
meta: Any
class User(DirectoryObject):
full_name: str
first_name: Optional[str]
class Group(DirectoryObject):
members: List[DirectoryObject]
```
If you deserialize into `Rule(DirectoryObject)`, the matching class will
automatically be selected. If none of the subclasses match, an error is thrown
since the DirectoryObject is declared abstract.
If you want to discriminate not by field names or types, but by their values,
you can define a `@discriminate` annotation.
### Value validations
The syntax for validating the value of a key is currently a bit weird. It is
incompatible with existing syntax for defaults, but the type syntax is the same.
Example:
```python
from typing import Optional
from dict_deserializer.deserializer import Deserializable
from dict_deserializer.annotations import validated
class Test(Deserializable):
name: Optional[str]
@validated(default='Unknown')
def name(self, value):
if len(value) > 20:
raise TypeError('Name may not be longer than 20 characters.')
```
## Limitations
This library uses the `typing` module extensively. It does, however, only
......@@ -12,6 +120,9 @@ support some of its types. This is a list of verified composite types:
* `Union` (Including `Optional`)
* `List`
* `Any`
* `dict_deserializer.deserializer.Deserializable`
* `dict`
* `list`
It supports these types as terminal types:
......@@ -20,3 +131,15 @@ It supports these types as terminal types:
* `str`
* `NoneType`
* `bool`
## Planned features
* Tuples
* Lists will probably deserialize into tuples
* NamedTuples
* Both anonymous namedtuples and class-based NamedTuples, with (optional) type annotations.
* Dataclasses
* A way to allow deserializing into a class not extending `Deserializable`
* Enums
* Sets
* From lists
from collections import namedtuple
name = 'Dictionary deserializer'
version = '0.0.3'
version = '0.0.4'
description = "Dictionary deserializer is a package that aides in the " \
"deserializing of JSON (or other structures) that are " \
"converted to dicts, into composite classes."
......
......@@ -2,12 +2,25 @@ from abc import ABC, abstractmethod
class Discriminator(ABC):
    """
    Base class for all discriminators.

    A discriminator is attached to a ``Deserializable`` class (via the
    ``discriminate`` annotation) and decides whether a raw dict is allowed
    to deserialize into that class.
    """

    @abstractmethod
    def check(self, d: dict) -> bool:
        """
        Returns true or false, depending on whether the discriminator
        matches the provided class.

        :param d: the data
        :return: True when this class is valid, False otherwise.
        """
        return
class KeyValueDiscriminator:
class KeyValueDiscriminator(Discriminator):
"""
Discriminates on key and optionally a value.
"""
def __init__(self, key, value, has_value=True):
self.key = key
self.value = value
......@@ -27,7 +40,10 @@ class KeyValueDiscriminator:
return True
class FunctionDiscriminator:
class FunctionDiscriminator(Discriminator):
"""
Discriminates according to a custom function.
"""
def __init__(self, matcher):
self.matcher = matcher
......@@ -35,14 +51,24 @@ class FunctionDiscriminator:
return self.matcher(d)
sentinel = object()
_sentinel = object()
def discriminate(key=None, value=_sentinel, matcher=None):
"""
Class level annotation to specify requirements of the raw datastructure
in order to be allowed to deserialize into this class (or its subclasses).
def discriminate(key=None, value=sentinel, matcher=None):
:param key: The key to discriminate against
:param value: (Optionally) the value that the property designated by ``key``
should hold
:param matcher: (Optionally) a custom function to discriminate with.
:return: A function that should wrap the class to be discriminated.
"""
def _inner(cls):
dc = None
if key is not None:
dc = KeyValueDiscriminator(key, value, value is not sentinel)
dc = KeyValueDiscriminator(key, value, value is not _sentinel)
elif matcher is not None:
dc = FunctionDiscriminator(matcher)
......@@ -56,5 +82,38 @@ def discriminate(key=None, value=sentinel, matcher=None):
def abstract(cls):
    """
    Class decorator marking a class as abstract: it can never be
    deserialized into directly — only subclasses that are not themselves
    declared abstract can be instanced.

    This is equivalent to setting the class property ``_abstract=True``.

    :param cls: The class that should be abstract
    :return: The same class
    """
    setattr(cls, '_abstract', True)
    return cls
def validated(default=None):
    """
    Used to decorate a validator function. Can be used if one would want to
    constrain the value of a property. Of course, ``@property`` may be used
    as well. The validator should throw a TypeError when validation fails.

    The decorated function becomes a ``property`` whose setter runs the
    validator before storing the value; the value is stored per instance.

    :param default: The default (initial) value of this property
    :return: the wrapper function.
    """
    def _wrapper(fn):
        # Store the value on the instance instead of in a closure cell.
        # A closure cell (the old ``iv = [default]`` trick) is created once
        # per class, so every instance would share one value: assigning the
        # property on one object leaked to all others and clobbered the
        # default for instances created later.
        attr = '_validated_' + fn.__name__

        def _getter(self):
            # Fall back to the default until a value has been assigned.
            return getattr(self, attr, default)

        def _setter(self, value):
            # Validate first; only store when fn() did not raise.
            fn(self, value)
            setattr(self, attr, value)

        return property(fget=_getter, fset=_setter)
    return _wrapper
from typing import Optional, Union, List, Tuple, Dict
from typing import Optional, Union, List, Tuple, Dict, Any
from typeguard import check_type
......@@ -42,16 +42,20 @@ class Rule:
return value
class BaseMeta(type):
class DeserializableMeta(type):
"""
Metaclass for all Deserializable
"""
def __new__(
mcs: 'BaseMeta', name: str, bases: Tuple[type], namespace: dict)\
mcs: 'DeserializableMeta', name: str, bases: Tuple[type], namespace: dict)\
-> type:
def auto_ctor(self, **kwargs):
for k, v in type(self).get_attrs().items():
setattr(self, k, kwargs.get(k))
namespace['_discriminators'] = []
namespace['_abstract'] = False
namespace['__init__'] = auto_ctor
cls = type.__new__(mcs, name, bases, namespace)
......@@ -67,7 +71,7 @@ class BaseMeta(type):
return cls
def rbase(cls: type, ls: List[type]=None) -> List[type]:
def _rbase(cls: type, ls: List[type]=None) -> List[type]:
"""
Get all base classes for cls.
"""
......@@ -77,7 +81,7 @@ def rbase(cls: type, ls: List[type]=None) -> List[type]:
if len(cls.__bases__) > 0:
for k in cls.__bases__:
ls.append(k)
rbase(k, ls)
_rbase(k, ls)
return ls
......@@ -96,7 +100,7 @@ def _is_valid(key: str, value) -> bool:
not isinstance(value, property)
class Deserializable(metaclass=BaseMeta):
class Deserializable(metaclass=DeserializableMeta):
"""
Base class for all automagically deserializing classes.
"""
......@@ -109,11 +113,13 @@ class Deserializable(metaclass=BaseMeta):
"""
fields = {}
defaults = {}
rl = list(reversed(rbase(cls)))
rl = list(reversed(_rbase(cls)))
rl.append(cls)
for c in rl:
for k in c.__dict__:
if _is_valid(k, c.__dict__[k]):
if isinstance(c.__dict__[k], property):
fields[k] = Rule(Any)
elif _is_valid(k, c.__dict__[k]):
defaults[k] = c.__dict__[k]
fields[k] = Rule(Optional[type(defaults[k])],
default=defaults[k])
......@@ -137,7 +143,7 @@ def get_deserialization_classes(t, d, try_all=True) -> List[type]:
:param t: The type to match from.
:param d: The dict to match onto.
:param try_all: Whether to support automatic discrimination.
:return:
:return: an ordered list of candidate classes to deserialize into.
"""
candidates = []
for sc in t.__subclasses__():
......@@ -215,25 +221,24 @@ def deserialize(rule: Rule, data, try_all: bool=True, key: str='$'):
classes = get_deserialization_classes(rule.type, data, try_all)
cause = None
for cls in classes:
try:
instance = cls()
for k, r in cls.get_attrs().items():
v = deserialize(
return cls(**{k: deserialize(
r,
data[k] if k in data else r.default,
try_all,
key='{}.{}'.format(key, k)
)
setattr(instance, k, v)
return instance
) for k, r in cls.get_attrs().items()})
except TypeError as e:
if not try_all:
raise e
else:
cause = e
raise TypeError('Unable to find matching non-abstract (sub)type of '
'{} with key {}.'.format(rule.type, key))
'{} with key {}. Reason: {}.'.format(rule.type, key, cause))
raise TypeError('Unable to find a deserialization candidate for '
'{} with key {}.'.format(rule, key))
/build/
\ No newline at end of file
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
# -- Project information -----------------------------------------------------
project = 'dict_deserializer'
copyright = '2018, Rolf van Kleef'
author = 'Rolf van Kleef'
# The short X.Y version
version = '0.0'
# The full version, including alpha/beta/rc tags
release = '0.0.4'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'm2r',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {
# 'navigation_depth': 4,
# 'includehidden': True,
# }
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'dict_deserializerdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'dict_deserializer.tex', 'dict\\_deserializer Documentation',
'Rolf van Kleef', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dict_deserializer', 'dict_deserializer Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'dict_deserializer', 'dict_deserializer Documentation',
author, 'dict_deserializer', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
.. toctree::
:maxdepth: 4
:hidden:
Home <self>
Modules <modules>
.. mdinclude:: ../../README.md
Modules
=======
dict\_deserializer.annotations
------------------------------
.. automodule:: dict_deserializer.annotations
:members:
:undoc-members:
:show-inheritance:
dict\_deserializer.deserializer
-------------------------------
.. automodule:: dict_deserializer.deserializer
:members:
:undoc-members:
:show-inheritance:
build:
image: latest
python:
version: 3.6
requirements_file: requirements.txt
......@@ -7,9 +7,6 @@ from dict_deserializer.deserializer import Deserializable, deserialize, Rule
@abstract
class Object(Deserializable):
def __init__(self, name=None):
self.name = name
name: str
def __repr__(self):
......@@ -20,11 +17,6 @@ class Object(Deserializable):
class User(Object):
def __init__(self, full_name=None, calling_name=None, *args, **kwargs):
super(User, self).__init__(*args, **kwargs)
self.full_name = full_name
self.calling_name = calling_name
full_name: str
calling_name: Optional[str] = 'Unknown'
......@@ -43,10 +35,6 @@ class User(Object):
class Group(Object):
def __init__(self, members=None, *args, **kwargs):
super(Group, self).__init__(*args, **kwargs)
self.members = members
members: List[Object]
def __repr__(self):
......
import unittest
from dict_deserializer.annotations import validated
from dict_deserializer.deserializer import Deserializable, deserialize, Rule
class Object(Deserializable):
    # Name of the object; constrained by the validator below.
    name: str

    @validated()
    def name(self, value):
        # The limit enforced here is 10 characters; the original error
        # message claimed 20, contradicting the check — fixed to match.
        if len(value) > 10:
            raise TypeError("Maximum name length is 10 characters")

    def __repr__(self):
        return 'Object(name="{}")'.format(self.name)

    def __eq__(self, other):
        return isinstance(other, Object) and other.name == self.name
class TestLists(unittest.TestCase):
    # NOTE(review): the class name suggests list tests, but these cases
    # exercise the @validated value-validation decorator — consider renaming
    # to e.g. TestValidations.

    def test_SetWrongTypeShouldFail(self):
        # An int is not a str, so deserialization must raise TypeError
        # before the value validator is even consulted.
        with self.assertRaises(TypeError):
            deserialize(Rule(Object), {
                'name': 8
            })

    def test_SetWrongValueShouldFail(self):
        # 26 characters exceeds the validator's length limit, so the
        # validator raises TypeError.
        with self.assertRaises(TypeError):
            deserialize(Rule(Object), {
                'name': 'abcdefghijklmnopqrstuvwxyz'
            })

    def test_SetCorrectValueShouldSucceed(self):
        # A short name passes validation and round-trips into an Object.
        self.assertEqual(
            Object(name='Rolf'),
            deserialize(Rule(Object), {
                'name': 'Rolf'
            }, try_all=False)
        )