mirror of
https://github.com/aykhans/AzSuicideDataVisualization.git
synced 2025-07-01 14:07:48 +00:00
first commit
This commit is contained in:
47
.venv/Lib/site-packages/pyrsistent/__init__.py
Normal file
47
.venv/Lib/site-packages/pyrsistent/__init__.py
Normal file
@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from pyrsistent._pmap import pmap, m, PMap
|
||||
|
||||
from pyrsistent._pvector import pvector, v, PVector
|
||||
|
||||
from pyrsistent._pset import pset, s, PSet
|
||||
|
||||
from pyrsistent._pbag import pbag, b, PBag
|
||||
|
||||
from pyrsistent._plist import plist, l, PList
|
||||
|
||||
from pyrsistent._pdeque import pdeque, dq, PDeque
|
||||
|
||||
from pyrsistent._checked_types import (
|
||||
CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
|
||||
CheckedValueTypeError, CheckedType, optional)
|
||||
|
||||
from pyrsistent._field_common import (
|
||||
field, PTypeError, pset_field, pmap_field, pvector_field)
|
||||
|
||||
from pyrsistent._precord import PRecord
|
||||
|
||||
from pyrsistent._pclass import PClass, PClassMeta
|
||||
|
||||
from pyrsistent._immutable import immutable
|
||||
|
||||
from pyrsistent._helpers import freeze, thaw, mutant
|
||||
|
||||
from pyrsistent._transformations import inc, discard, rex, ny
|
||||
|
||||
from pyrsistent._toolz import get_in
|
||||
|
||||
|
||||
__all__ = ('pmap', 'm', 'PMap',
|
||||
'pvector', 'v', 'PVector',
|
||||
'pset', 's', 'PSet',
|
||||
'pbag', 'b', 'PBag',
|
||||
'plist', 'l', 'PList',
|
||||
'pdeque', 'dq', 'PDeque',
|
||||
'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional',
|
||||
'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field',
|
||||
'PClass', 'PClassMeta',
|
||||
'immutable',
|
||||
'freeze', 'thaw', 'mutant',
|
||||
'get_in',
|
||||
'inc', 'discard', 'rex', 'ny')
|
213
.venv/Lib/site-packages/pyrsistent/__init__.pyi
Normal file
213
.venv/Lib/site-packages/pyrsistent/__init__.pyi
Normal file
@ -0,0 +1,213 @@
|
||||
# flake8: noqa: E704
|
||||
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
|
||||
# Stubs for pyrsistent (Python 3.6)
|
||||
|
||||
from typing import Any
|
||||
from typing import AnyStr
|
||||
from typing import Callable
|
||||
from typing import Iterable
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Sequence
|
||||
from typing import Set
|
||||
from typing import Union
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TypeVar
|
||||
from typing import overload
|
||||
|
||||
# see commit 08519aa for explanation of the re-export
|
||||
from pyrsistent.typing import CheckedKeyTypeError as CheckedKeyTypeError
|
||||
from pyrsistent.typing import CheckedPMap as CheckedPMap
|
||||
from pyrsistent.typing import CheckedPSet as CheckedPSet
|
||||
from pyrsistent.typing import CheckedPVector as CheckedPVector
|
||||
from pyrsistent.typing import CheckedType as CheckedType
|
||||
from pyrsistent.typing import CheckedValueTypeError as CheckedValueTypeError
|
||||
from pyrsistent.typing import InvariantException as InvariantException
|
||||
from pyrsistent.typing import PClass as PClass
|
||||
from pyrsistent.typing import PBag as PBag
|
||||
from pyrsistent.typing import PDeque as PDeque
|
||||
from pyrsistent.typing import PList as PList
|
||||
from pyrsistent.typing import PMap as PMap
|
||||
from pyrsistent.typing import PMapEvolver as PMapEvolver
|
||||
from pyrsistent.typing import PSet as PSet
|
||||
from pyrsistent.typing import PSetEvolver as PSetEvolver
|
||||
from pyrsistent.typing import PTypeError as PTypeError
|
||||
from pyrsistent.typing import PVector as PVector
|
||||
from pyrsistent.typing import PVectorEvolver as PVectorEvolver
|
||||
|
||||
# Generic type variables used throughout these stubs.
T = TypeVar('T')
KT = TypeVar('KT')  # key type
VT = TypeVar('VT')  # value type


# Constructors for the persistent collections.  ``m``/``v``/``s`` are the
# varargs conveniences mirroring the runtime API.
def pmap(initial: Union[Mapping[KT, VT], Iterable[Tuple[KT, VT]]] = {}, pre_size: int = 0) -> PMap[KT, VT]: ...
def m(**kwargs: VT) -> PMap[str, VT]: ...


def pvector(iterable: Iterable[T] = ...) -> PVector[T]: ...
def v(*iterable: T) -> PVector[T]: ...


def pset(iterable: Iterable[T] = (), pre_size: int = 8) -> PSet[T]: ...
def s(*iterable: T) -> PSet[T]: ...


# see class_test.py for use cases
# An invariant result is (verdict, message); the message may be produced
# lazily by a zero-argument callable.
Invariant = Tuple[bool, Optional[Union[str, Callable[[], str]]]]
|
||||
|
||||
# Precisely-typed overload for the common single-type case; the second
# overload falls back to Any for the full range of accepted type specs.
@overload
def field(
    type: Union[Type[T], Sequence[Type[T]]] = ...,
    invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
    initial: Any = object(),
    mandatory: bool = False,
    factory: Callable[[Any], T] = lambda x: x,
    serializer: Callable[[Any, T], Any] = lambda _, value: value,
) -> T: ...
# The actual return value (_PField) is irrelevant after a PRecord has been instantiated,
# see https://github.com/tobgu/pyrsistent/blob/master/pyrsistent/_precord.py#L10
@overload
def field(
    type: Any = ...,
    invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
    initial: Any = object(),
    mandatory: bool = False,
    factory: Callable[[Any], Any] = lambda x: x,
    serializer: Callable[[Any, Any], Any] = lambda _, value: value,
) -> Any: ...
|
||||
|
||||
# Use precise types for the simplest use cases, but fall back to Any for
# everything else. See record_test.py for the wide range of possible types for
# item_type
@overload
def pset_field(
    item_type: Type[T],
    optional: bool = False,
    initial: Iterable[T] = ...,
) -> PSet[T]: ...
@overload
def pset_field(
    item_type: Any,
    optional: bool = False,
    initial: Any = (),
) -> PSet[Any]: ...


@overload
def pmap_field(
    key_type: Type[KT],
    value_type: Type[VT],
    optional: bool = False,
    invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[KT, VT]: ...
@overload
def pmap_field(
    key_type: Any,
    value_type: Any,
    optional: bool = False,
    invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[Any, Any]: ...


@overload
def pvector_field(
    item_type: Type[T],
    optional: bool = False,
    initial: Iterable[T] = ...,
) -> PVector[T]: ...
@overload
def pvector_field(
    item_type: Any,
    optional: bool = False,
    initial: Any = (),
) -> PVector[Any]: ...


# Remaining collection constructors.
def pbag(elements: Iterable[T]) -> PBag[T]: ...
def b(*elements: T) -> PBag[T]: ...


def plist(iterable: Iterable[T] = (), reverse: bool = False) -> PList[T]: ...
def l(*elements: T) -> PList[T]: ...


def pdeque(iterable: Optional[Iterable[T]] = None, maxlen: Optional[int] = None) -> PDeque[T]: ...
def dq(*iterable: T) -> PDeque[T]: ...


# ``optional(T)`` produces a type spec allowing T or None.
@overload
def optional(type: T) -> Tuple[T, Type[None]]: ...
@overload
def optional(*typs: Any) -> Tuple[Any, ...]: ...
|
||||
|
||||
# Bound TypeVar so methods on subclasses return the subclass type.
T_PRecord = TypeVar('T_PRecord', bound='PRecord')
class PRecord(PMap[AnyStr, Any]):
    # Class-level field/initial-value registries populated by the metaclass.
    _precord_fields: Mapping
    _precord_initial_values: Mapping

    def __hash__(self) -> int: ...
    def __init__(self, **kwargs: Any) -> None: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __len__(self) -> int: ...
    @classmethod
    def create(
        cls: Type[T_PRecord],
        kwargs: Mapping,
        _factory_fields: Optional[Iterable] = None,
        ignore_extra: bool = False,
    ) -> T_PRecord: ...
    # This is OK because T_PRecord is a concrete type
    def discard(self: T_PRecord, key: KT) -> T_PRecord: ...
    def remove(self: T_PRecord, key: KT) -> T_PRecord: ...

    def serialize(self, format: Optional[Any] = ...) -> MutableMapping: ...

    # From pyrsistent documentation:
    # This set function differs slightly from that in the PMap
    # class. First of all it accepts key-value pairs. Second it accepts multiple key-value
    # pairs to perform one, atomic, update of multiple fields.
    @overload
    def set(self, key: KT, val: VT) -> Any: ...
    @overload
    def set(self, **kwargs: VT) -> Any: ...
|
||||
|
||||
def immutable(
    members: Union[str, Iterable[str]] = '',
    name: str = 'Immutable',
    verbose: bool = False,
) -> Tuple: ...  # actually a namedtuple

# ignore mypy warning "Overloaded function signatures 1 and 5 overlap with
# incompatible return types"
@overload
def freeze(o: Mapping[KT, VT]) -> PMap[KT, VT]: ...  # type: ignore
@overload
def freeze(o: List[T]) -> PVector[T]: ...  # type: ignore
@overload
def freeze(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
@overload
def freeze(o: Set[T]) -> PSet[T]: ...  # type: ignore
@overload
def freeze(o: T) -> T: ...


# thaw is the inverse of freeze: persistent -> mutable builtins.
@overload
def thaw(o: PMap[KT, VT]) -> MutableMapping[KT, VT]: ...  # type: ignore
@overload
def thaw(o: PVector[T]) -> List[T]: ...  # type: ignore
@overload
def thaw(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
# collections.abc.MutableSet is kind of garbage:
# https://stackoverflow.com/questions/24977898/why-does-collections-mutableset-not-bestow-an-update-method
@overload
def thaw(o: PSet[T]) -> Set[T]: ...  # type: ignore
@overload
def thaw(o: T) -> T: ...


def mutant(fn: Callable) -> Callable: ...


# Transformation helpers usable with PVector/PMap .transform().
def inc(x: int) -> int: ...
@overload
def discard(evolver: PMapEvolver[KT, VT], key: KT) -> None: ...
@overload
def discard(evolver: PVectorEvolver[T], key: int) -> None: ...
@overload
def discard(evolver: PSetEvolver[T], key: T) -> None: ...
def rex(expr: str) -> Callable[[Any], bool]: ...
def ny(_: Any) -> bool: ...


def get_in(keys: Iterable, coll: Mapping, default: Optional[Any] = None, no_default: bool = False) -> Any: ...
|
542
.venv/Lib/site-packages/pyrsistent/_checked_types.py
Normal file
542
.venv/Lib/site-packages/pyrsistent/_checked_types.py
Normal file
@ -0,0 +1,542 @@
|
||||
from enum import Enum
|
||||
|
||||
from abc import abstractmethod, ABCMeta
|
||||
from collections.abc import Iterable
|
||||
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
from pyrsistent._pset import PSet, pset
|
||||
from pyrsistent._pvector import PythonPVector, python_pvector
|
||||
|
||||
|
||||
class CheckedType(object):
    """
    Marker class to enable creation and serialization of checked object graphs.

    Concrete checked collections (CheckedPVector, CheckedPSet, CheckedPMap,
    ...) implement ``create`` and ``serialize``; this base only defines the
    abstract contract.
    """
    __slots__ = ()

    @classmethod
    @abstractmethod
    def create(cls, source_data, _factory_fields=None):
        # Build an instance of cls from plain (unchecked) data.
        raise NotImplementedError()

    @abstractmethod
    def serialize(self, format=None):
        # Convert back to plain data; `format` is forwarded to serializers.
        raise NotImplementedError()
|
||||
|
||||
|
||||
def _restore_pickle(cls, data):
|
||||
return cls.create(data, _factory_fields=set())
|
||||
|
||||
|
||||
class InvariantException(Exception):
    """
    Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory
    field is missing.

    Contains two fields of interest:
    invariant_errors, a tuple of error data for the failing invariants
    missing_fields, a tuple of strings specifying the missing names
    """

    def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs):
        # Error codes may be zero-argument callables that lazily produce the datum.
        self.invariant_errors = tuple(
            code() if callable(code) else code for code in error_codes
        )
        self.missing_fields = missing_fields
        super(InvariantException, self).__init__(*args, **kwargs)

    def __str__(self):
        base = super(InvariantException, self).__str__()
        errors = ', '.join(str(error) for error in self.invariant_errors)
        missing = ', '.join(self.missing_fields)
        return "{0}, invariant_errors=[{1}], missing_fields=[{2}]".format(
            base, errors, missing)
|
||||
|
||||
|
||||
_preserved_iterable_types = (
    Enum,
)
"""Some types are themselves iterable, but we want to use the type itself and
not its members for the type specification. This defines a set of such types
that we explicitly preserve.

Note that strings are not such types because the string inputs we pass in are
values, not types.
"""


def maybe_parse_user_type(t):
    """Coerce a user-supplied type directive into a flat sequence of types.

    This function should be used in all places where a user specifies a type,
    for consistency.

    Accepts a type, a dotted-name string naming a type, or an iterable of
    either (flattened recursively); anything else raises TypeError.
    """
    if isinstance(t, type):
        # Preserved iterable types (e.g. Enum subclasses) and ordinary
        # non-iterable types are taken as-is.
        if issubclass(t, _preserved_iterable_types) or not isinstance(t, Iterable):
            return [t]
    elif isinstance(t, str):
        # Strings name types to be resolved later.
        return [t]

    if isinstance(t, Iterable):
        # Flatten, validating each contained directive recursively.
        return tuple(
            parsed
            for directive in t
            for parsed in maybe_parse_user_type(directive)
        )

    # If this raises because `t` cannot be formatted, so be it.
    raise TypeError(
        'Type specifications must be types or strings. Input: {}'.format(t)
    )
|
||||
|
||||
|
||||
def maybe_parse_many_user_types(ts):
    """Parse an iterable of user type directives into a flat type sequence."""
    # Just a different name to communicate that you're parsing multiple user
    # inputs. `maybe_parse_user_type` handles the iterable case anyway.
    return maybe_parse_user_type(ts)
|
||||
|
||||
|
||||
def _store_types(dct, bases, destination_name, source_name):
    """Collect *source_name* declarations from *dct* and all direct bases,
    parse them, and store the result in *dct* under *destination_name*.

    Used by the checked-type metaclasses to turn e.g. ``__type__`` into
    ``_checked_types``; declarations are inherited from base classes.
    """
    maybe_types = maybe_parse_many_user_types([
        d[source_name]
        for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d
    ])

    dct[destination_name] = maybe_types
|
||||
|
||||
|
||||
def _merge_invariant_results(result):
|
||||
verdict = True
|
||||
data = []
|
||||
for verd, dat in result:
|
||||
if not verd:
|
||||
verdict = False
|
||||
data.append(dat)
|
||||
|
||||
return verdict, tuple(data)
|
||||
|
||||
|
||||
def wrap_invariant(invariant):
    """Wrap *invariant* so that multi-result outcomes are merged.

    Invariant functions may return either a single (bool, data) pair or a
    collection of such pairs; the wrapper normalizes both shapes to one pair
    before handing the result back to the client.
    """
    def wrapper(*args, **kwargs):
        outcome = invariant(*args, **kwargs)
        # A single (bool, data) pair passes through untouched.
        if isinstance(outcome[0], bool):
            return outcome

        return _merge_invariant_results(outcome)

    return wrapper
|
||||
|
||||
|
||||
def _all_dicts(bases, seen=None):
|
||||
"""
|
||||
Yield each class in ``bases`` and each of their base classes.
|
||||
"""
|
||||
if seen is None:
|
||||
seen = set()
|
||||
for cls in bases:
|
||||
if cls in seen:
|
||||
continue
|
||||
seen.add(cls)
|
||||
yield cls.__dict__
|
||||
for b in _all_dicts(cls.__bases__, seen):
|
||||
yield b
|
||||
|
||||
|
||||
def store_invariants(dct, bases, destination_name, source_name):
    """Collect *source_name* invariants from *dct* and the whole base-class
    hierarchy, wrap them, and store the tuple under *destination_name*.

    Invariants are inherited: every namespace in the hierarchy contributes.
    Raises TypeError if any collected invariant is not callable.
    """
    invariants = [
        namespace[source_name]
        for namespace in [dct] + list(_all_dicts(bases))
        if source_name in namespace
    ]

    if not all(callable(invariant) for invariant in invariants):
        raise TypeError('Invariants must be callable')
    dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)
|
||||
|
||||
|
||||
class _CheckedTypeMeta(ABCMeta):
    """Metaclass for checked sequence/set types.

    Collects (inherited) ``__type__`` and ``__invariant__`` declarations
    into ``_checked_types`` / ``_checked_invariants`` class attributes and
    installs a default element serializer.
    """

    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_types', '__type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, value):
            # Nested checked types serialize themselves recursively;
            # everything else passes through unchanged.
            if isinstance(value, CheckedType):
                return value.serialize()
            return value

        dct.setdefault('__serializer__', default_serializer)

        # Checked collections carry no per-instance __dict__.
        dct['__slots__'] = ()

        return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct)
|
||||
|
||||
|
||||
class CheckedTypeError(TypeError):
    """Base error for type violations in checked collections.

    Attributes:
      source_class -- The class of the collection
      expected_types -- Allowed types
      actual_type -- The non matching type
      actual_value -- Value of the variable with the non matching type
    """

    def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs):
        super(CheckedTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.expected_types = expected_types
        self.actual_type = actual_type
        self.actual_value = actual_value
|
||||
|
||||
|
||||
class CheckedKeyTypeError(CheckedTypeError):
    """
    Raised when trying to set a value using a key with a type that doesn't match the declared type.

    Attributes:
        source_class -- The class of the collection
        expected_types -- Allowed types
        actual_type -- The non matching type
        actual_value -- Value of the variable with the non matching type
    """
    # Distinct subclass so callers can tell key violations from value violations.
    pass
|
||||
|
||||
|
||||
class CheckedValueTypeError(CheckedTypeError):
    """
    Raised when trying to store a value whose type doesn't match the declared value type.

    Attributes:
        source_class -- The class of the collection
        expected_types -- Allowed types
        actual_type -- The non matching type
        actual_value -- Value of the variable with the non matching type
    """
    # Distinct subclass so callers can tell value violations from key violations.
    pass
|
||||
|
||||
|
||||
def _get_class(type_name):
|
||||
module_name, class_name = type_name.rsplit('.', 1)
|
||||
module = __import__(module_name, fromlist=[class_name])
|
||||
return getattr(module, class_name)
|
||||
|
||||
|
||||
def get_type(typ):
    """Return *typ* itself when it is a type; otherwise resolve it as a
    dotted-name string via :func:`_get_class`."""
    return typ if isinstance(typ, type) else _get_class(typ)
|
||||
|
||||
|
||||
def get_types(typs):
    """Resolve every entry of *typs* (types or dotted names) to a type."""
    return list(map(get_type, typs))
|
||||
|
||||
|
||||
def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError):
    """Raise *exception_type* for the first element of *it* whose type is not
    among *expected_types*.

    A no-op when no types are declared (``expected_types`` is empty), so
    unannotated checked collections accept anything.
    """
    if expected_types:
        for e in it:
            if not any(isinstance(e, get_type(t)) for t in expected_types):
                actual_type = type(e)
                msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format(
                    source_class=source_class.__name__,
                    expected_types=tuple(get_type(et).__name__ for et in expected_types),
                    actual_type=actual_type.__name__)
                raise exception_type(source_class, expected_types, actual_type, e, msg)
|
||||
|
||||
|
||||
def _invariant_errors(elem, invariants):
|
||||
return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid]
|
||||
|
||||
|
||||
def _invariant_errors_iterable(it, invariants):
    """Collect invariant error data across all elements of *it*.

    Flattens the per-element error lists into one list.  A flat list
    comprehension is used instead of ``sum(lists, [])``, which rebuilds the
    accumulator on every step and is quadratic in the number of elements.
    """
    return [error
            for elem in it
            for error in _invariant_errors(elem, invariants)]
|
||||
|
||||
|
||||
def optional(*typs):
    """ Convenience function to specify that a value may be of any of the types in type 'typs' or None """
    return (*typs, type(None))
|
||||
|
||||
|
||||
def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False):
    """Shared ``create`` implementation for checked sequences and sets.

    Returns *source_data* untouched when it is already an instance of *cls*;
    otherwise builds a new instance, recursively ``create``-ing elements
    whose type is a nested CheckedType.
    """
    if isinstance(source_data, cls):
        return source_data

    # Recursively apply create methods of checked types if the types of the supplied data
    # does not match any of the valid types.
    types = get_types(cls._checked_types)
    checked_type = next((t for t in types if issubclass(t, CheckedType)), None)
    if checked_type:
        # Elements already of a declared type are kept as-is; others are
        # converted through the (single) nested checked type.
        return cls([checked_type.create(data, ignore_extra=ignore_extra)
                    if not any(isinstance(data, t) for t in types) else data
                    for data in source_data])

    return cls(source_data)
|
||||
|
||||
class CheckedPVector(PythonPVector, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPVector is a PVector which allows specifying type and invariant checks.

    >>> class Positives(CheckedPVector):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        # Fast path: an exact PythonPVector shares its internal structure
        # directly (no element-wise checks on already-built vectors).
        if type(initial) == PythonPVector:
            return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail)

        # Otherwise route every element through the evolver's checks.
        return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent()

    def set(self, key, value):
        return self.evolver().set(key, value).persistent()

    def append(self, val):
        return self.evolver().append(val).persistent()

    def extend(self, it):
        return self.evolver().extend(it).persistent()

    # Shared constructor that recursively applies nested checked types.
    create = classmethod(_checked_type_create)

    def serialize(self, format=None):
        # Serialize each element with the class serializer (installed by the metaclass).
        serializer = self.__serializer__
        return list(serializer(format, v) for v in self)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    class Evolver(PythonPVector.Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, vector):
            super(CheckedPVector.Evolver, self).__init__(vector)
            self._destination_class = destination_class
            # Invariant failures accumulate here; raised on persistent().
            self._invariant_errors = []

        def _check(self, it):
            # Type violations raise immediately; invariant failures are deferred.
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def __setitem__(self, key, value):
            self._check([value])
            return super(CheckedPVector.Evolver, self).__setitem__(key, value)

        def append(self, elem):
            self._check([elem])
            return super(CheckedPVector.Evolver, self).append(elem)

        def extend(self, it):
            # Materialize once: `it` is consumed by both the check and the extend.
            it = list(it)
            self._check(it)
            return super(CheckedPVector.Evolver, self).extend(it)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            result = self._orig_pvector
            # Rebuild only when something changed or the class differs.
            if self.is_dirty() or (self._destination_class != type(self._orig_pvector)):
                pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail)
                result = self._destination_class(pv)
                self._reset(result)

            return result

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(self.tolist())

    __str__ = __repr__

    def evolver(self):
        return CheckedPVector.Evolver(self.__class__, self)
|
||||
|
||||
|
||||
class CheckedPSet(PSet, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPSet is a PSet which allows specifying type and invariant checks.

    >>> class Positives(CheckedPSet):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        # NOTE(review): an exact PMap is treated as the pre-built backing map
        # (the Evolver below writes into a _pmap_evolver, so PSet appears to
        # be implemented on top of PMap) — confirm against PSet.__new__.
        if type(initial) is PMap:
            return super(CheckedPSet, cls).__new__(cls, initial)

        # Otherwise route every element through the evolver's checks.
        evolver = CheckedPSet.Evolver(cls, pset())
        for e in initial:
            evolver.add(e)

        return evolver.persistent()

    def __repr__(self):
        # Reuse the base repr with the subclass name; presumably PSet's repr
        # starts with the 4-character prefix 'pset' — TODO confirm.
        return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]

    def __str__(self):
        return self.__repr__()

    def serialize(self, format=None):
        # Serialize each member with the class serializer (installed by the metaclass).
        serializer = self.__serializer__
        return set(serializer(format, v) for v in self)

    # Shared constructor that recursively applies nested checked types.
    create = classmethod(_checked_type_create)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    def evolver(self):
        return CheckedPSet.Evolver(self.__class__, self)

    class Evolver(PSet._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_set):
            super(CheckedPSet.Evolver, self).__init__(original_set)
            self._destination_class = destination_class
            # Invariant failures accumulate here; raised on persistent().
            self._invariant_errors = []

        def _check(self, it):
            # Type violations raise immediately; invariant failures are deferred.
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def add(self, element):
            self._check([element])
            # Set membership is stored as a key in the backing map evolver.
            self._pmap_evolver[element] = True
            return self

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            # Rebuild only when something changed or the class differs.
            if self.is_dirty() or self._destination_class != type(self._original_pset):
                return self._destination_class(self._pmap_evolver.persistent())

            return self._original_pset
|
||||
|
||||
|
||||
class _CheckedMapTypeMeta(type):
    """Metaclass for checked maps.

    Collects (inherited) ``__key_type__``, ``__value_type__`` and
    ``__invariant__`` declarations into class attributes and installs a
    default (key, value) serializer.
    """

    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_key_types', '__key_type__')
        _store_types(dct, bases, '_checked_value_types', '__value_type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, key, value):
            # Checked keys/values serialize themselves recursively;
            # everything else passes through unchanged.
            sk = key
            if isinstance(key, CheckedType):
                sk = key.serialize()

            sv = value
            if isinstance(value, CheckedType):
                sv = value.serialize()

            return sk, sv

        dct.setdefault('__serializer__', default_serializer)

        # Checked maps carry no per-instance __dict__.
        dct['__slots__'] = ()

        return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct)
|
||||
|
||||
# Marker object: distinguishes "size not supplied" from any real size value.
_UNDEFINED_CHECKED_PMAP_SIZE = object()


class CheckedPMap(PMap, CheckedType, metaclass=_CheckedMapTypeMeta):
    """
    A CheckedPMap is a PMap which allows specifying type and invariant checks.

    >>> class IntToFloatMap(CheckedPMap):
    ...     __key_type__ = int
    ...     __value_type__ = float
    ...     __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')
    ...
    >>> IntToFloatMap({1: 1.5, 2: 2.25})
    IntToFloatMap({1: 1.5, 2: 2.25})
    """

    __slots__ = ()

    def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE):
        # Internal fast path (used by Evolver.persistent): when `size` is
        # given, `initial` is the pre-built bucket structure.
        if size is not _UNDEFINED_CHECKED_PMAP_SIZE:
            return super(CheckedPMap, cls).__new__(cls, size, initial)

        # Public path: run every pair through the evolver's checks.
        evolver = CheckedPMap.Evolver(cls, pmap())
        for k, v in initial.items():
            evolver.set(k, v)

        return evolver.persistent()

    def evolver(self):
        return CheckedPMap.Evolver(self.__class__, self)

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(str(dict(self)))

    __str__ = __repr__

    def serialize(self, format=None):
        # The map serializer (installed by the metaclass) maps each
        # (key, value) pair to a serialized (key, value) pair.
        serializer = self.__serializer__
        return dict(serializer(format, k, v) for k, v in self.items())

    @classmethod
    def create(cls, source_data, _factory_fields=None):
        """Build an instance from a plain mapping, recursively converting
        keys/values whose declared type is a nested CheckedType."""
        if isinstance(source_data, cls):
            return source_data

        # Recursively apply create methods of checked types if the types of the supplied data
        # does not match any of the valid types.
        key_types = get_types(cls._checked_key_types)
        checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None)
        value_types = get_types(cls._checked_value_types)
        checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None)

        if checked_key_type or checked_value_type:
            return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key,
                             checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value)
                            for key, value in source_data.items()))

        return cls(source_data)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    class Evolver(PMap._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_map):
            super(CheckedPMap.Evolver, self).__init__(original_map)
            self._destination_class = destination_class
            # Invariant failures accumulate here; raised on persistent().
            self._invariant_errors = []

        def set(self, key, value):
            # Keys and values are checked separately so the appropriate
            # exception type (key vs value) is raised.
            _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError)
            _check_types([value], self._destination_class._checked_value_types, self._destination_class)
            self._invariant_errors.extend(data for valid, data in (invariant(key, value)
                                                                   for invariant in self._destination_class._checked_invariants)
                                          if not valid)

            return super(CheckedPMap.Evolver, self).set(key, value)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            # Rebuild only when something changed or the class differs.
            if self.is_dirty() or type(self._original_pmap) != self._destination_class:
                return self._destination_class(self._buckets_evolver.persistent(), self._size)

            return self._original_pmap
|
332
.venv/Lib/site-packages/pyrsistent/_field_common.py
Normal file
332
.venv/Lib/site-packages/pyrsistent/_field_common.py
Normal file
@ -0,0 +1,332 @@
|
||||
from pyrsistent._checked_types import (
|
||||
CheckedPMap,
|
||||
CheckedPSet,
|
||||
CheckedPVector,
|
||||
CheckedType,
|
||||
InvariantException,
|
||||
_restore_pickle,
|
||||
get_type,
|
||||
maybe_parse_user_type,
|
||||
maybe_parse_many_user_types,
|
||||
)
|
||||
from pyrsistent._checked_types import optional as optional_type
|
||||
from pyrsistent._checked_types import wrap_invariant
|
||||
import inspect
|
||||
|
||||
|
||||
def set_fields(dct, bases, name):
    """Gather every _PField attribute in *dct* into one dict under *name*.

    Fields inherited from *bases* are merged first (later bases win), then
    fields declared directly in *dct* override them and are removed from the
    class namespace.
    """
    inherited = {}
    for base in bases:
        inherited.update(base.__dict__.get(name, {}))
    dct[name] = inherited

    for key, value in list(dct.items()):
        if isinstance(value, _PField):
            dct[name][key] = value
            del dct[key]
|
||||
|
||||
|
||||
def check_global_invariants(subject, invariants):
    """Run every global invariant against *subject*.

    Raises InvariantException carrying the error codes of all failing
    invariants; returns None when everything holds.
    """
    failures = tuple(
        error_code
        for is_ok, error_code in (invariant(subject) for invariant in invariants)
        if not is_ok
    )
    if failures:
        raise InvariantException(failures, (), 'Global invariant failed')
|
||||
|
||||
|
||||
def serialize(serializer, format, value):
    """Serialize *value* with *serializer*, deferring to the value's own
    ``serialize`` when it is a CheckedType and no explicit serializer was
    given (i.e. the PFIELD_NO_SERIALIZER sentinel is still in place)."""
    if isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER:
        return value.serialize(format)

    return serializer(format, value)
|
||||
|
||||
|
||||
def check_type(destination_cls, field, name, value):
    """Raise PTypeError when *value* matches none of the field's declared
    types.  A field with no declared types accepts anything."""
    if field.type and not any(isinstance(value, get_type(t)) for t in field.type):
        actual_type = type(value)
        message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__)
        raise PTypeError(destination_cls, name, field.type, actual_type, message)
|
||||
|
||||
|
||||
def is_type_cls(type_cls, field_type):
    """Tell whether *field_type*'s (first) declared type subclasses *type_cls*.

    NOTE(review): a ``set``-valued field_type is accepted unconditionally,
    and only the first declared type is inspected — confirm this matches
    the callers' expectations.
    """
    if type(field_type) is set:
        return True
    declared = tuple(field_type)
    return bool(declared) and issubclass(get_type(declared[0]), type_cls)
|
||||
|
||||
|
||||
def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
    """Decide whether ``ignore_extra`` should be forwarded to the field's factory."""
    # ignore_extra defaults to False; skip the signature inspection in that
    # common case for speed.
    if not ignore_extra:
        return False
    if not is_type_cls(type_cls, field.type):
        return False
    # Only forward the flag when the factory actually accepts it.
    return 'ignore_extra' in inspect.signature(field.factory).parameters
|
||||
|
||||
|
||||
|
||||
class _PField(object):
|
||||
__slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer')
|
||||
|
||||
def __init__(self, type, invariant, initial, mandatory, factory, serializer):
|
||||
self.type = type
|
||||
self.invariant = invariant
|
||||
self.initial = initial
|
||||
self.mandatory = mandatory
|
||||
self._factory = factory
|
||||
self.serializer = serializer
|
||||
|
||||
@property
|
||||
def factory(self):
|
||||
# If no factory is specified and the type is another CheckedType use the factory method of that CheckedType
|
||||
if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1:
|
||||
typ = get_type(tuple(self.type)[0])
|
||||
if issubclass(typ, CheckedType):
|
||||
return typ.create
|
||||
|
||||
return self._factory
|
||||
|
||||
# Sentinel defaults used by `field` below.
PFIELD_NO_TYPE = ()                            # no type restriction
PFIELD_NO_INVARIANT = lambda _: (True, None)   # invariant that always passes
PFIELD_NO_FACTORY = lambda x: x                # identity factory (no conversion)
PFIELD_NO_INITIAL = object()                   # unique marker: no initial value supplied
PFIELD_NO_SERIALIZER = lambda _, value: value  # serializer that returns the value unchanged
|
||||
|
||||
|
||||
def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL,
          mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER):
    """
    Field specification factory for :py:class:`PRecord`.

    :param type: a type or iterable with types that are allowed for this field
    :param invariant: a function specifying an invariant that must hold for the field
    :param initial: value of field if not specified when instantiating the record
    :param mandatory: boolean specifying if the field is mandatory or not
    :param factory: function called when field is set.
    :param serializer: function that returns a serialized version of the field
    """
    # Multiple types are passed as a list/set/tuple (subclasses such as
    # namedtuples count too, since this is an isinstance check). This
    # predicate is about the argspec of `field`; the maybe_parse_* helpers
    # deal with the valid ways of spelling each individual type.
    if isinstance(type, (list, set, tuple)):
        allowed_types = set(maybe_parse_many_user_types(type))
    else:
        allowed_types = set(maybe_parse_user_type(type))

    if invariant != PFIELD_NO_INVARIANT and callable(invariant):
        invariant_function = wrap_invariant(invariant)
    else:
        invariant_function = invariant

    spec = _PField(type=allowed_types, invariant=invariant_function,
                   initial=initial, mandatory=mandatory,
                   factory=factory, serializer=serializer)

    _check_field_parameters(spec)
    return spec
|
||||
|
||||
|
||||
def _check_field_parameters(field):
    """Validate a freshly constructed _PField, raising TypeError on bad arguments."""
    for candidate in field.type:
        # Forward references are allowed as strings.
        if not isinstance(candidate, type) and not isinstance(candidate, str):
            raise TypeError('Type parameter expected, not {0}'.format(type(candidate)))

    has_initial = field.initial is not PFIELD_NO_INITIAL
    if (has_initial and not callable(field.initial) and field.type
            and not any(isinstance(field.initial, t) for t in field.type)):
        raise TypeError('Initial has invalid type {0}'.format(type(field.initial)))

    if not callable(field.invariant):
        raise TypeError('Invariant must be callable')

    if not callable(field.factory):
        raise TypeError('Factory must be callable')

    if not callable(field.serializer):
        raise TypeError('Serializer must be callable')
|
||||
|
||||
|
||||
class PTypeError(TypeError):
    """
    Raised when trying to assign a value with a type that doesn't match the declared type.

    Attributes:
    source_class -- The class of the record
    field -- Field name
    expected_types -- Types allowed for the field
    actual_type -- The non matching type
    """
    def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.actual_type = actual_type
        self.expected_types = expected_types
        self.field = field
        self.source_class = source_class
|
||||
|
||||
|
||||
# Suffix appended to the auto-generated name of a checked sequence type.
SEQ_FIELD_TYPE_SUFFIXES = {
    CheckedPVector: "PVector",
    CheckedPSet: "PSet",
}

# Global dictionary to hold auto-generated field types: used for unpickling
_seq_field_types = {}
|
||||
|
||||
def _restore_seq_field_pickle(checked_class, item_type, data):
    """Unpickling function for auto-generated PVec/PSet field types."""
    generated_type = _seq_field_types[checked_class, item_type]
    return _restore_pickle(generated_type, data)
|
||||
|
||||
def _types_to_names(types):
    """Convert a tuple of types to a human-readable string."""
    parts = [get_type(typ).__name__.capitalize() for typ in types]
    return "".join(parts)
|
||||
|
||||
def _make_seq_field_type(checked_class, item_type, item_invariant):
    """Create (or fetch the cached) subclass of *checked_class* checking *item_type*."""
    cached = _seq_field_types.get((checked_class, item_type))
    if cached is not None:
        return cached

    class GeneratedSeqType(checked_class):
        __type__ = item_type
        __invariant__ = item_invariant

        def __reduce__(self):
            # Pickle via the module-level registry rather than the
            # dynamically created class, which pickle cannot locate.
            return (_restore_seq_field_pickle,
                    (checked_class, item_type, list(self)))

    suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class]
    GeneratedSeqType.__name__ = _types_to_names(GeneratedSeqType._checked_types) + suffix
    _seq_field_types[checked_class, item_type] = GeneratedSeqType
    return GeneratedSeqType
|
||||
|
||||
def _sequence_field(checked_class, item_type, optional, initial,
                    invariant=PFIELD_NO_INVARIANT,
                    item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked field for either ``PSet`` or ``PVector``.

    :param checked_class: ``CheckedPSet`` or ``CheckedPVector``.
    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory.

    :return: A ``field`` containing a checked class.
    """
    seq_type = _make_seq_field_type(checked_class, item_type, item_invariant)

    if not optional:
        field_type = seq_type
        factory = seq_type.create
    else:
        field_type = optional_type(seq_type)

        def factory(argument, _factory_fields=None, ignore_extra=False):
            # Optional fields let None pass straight through.
            if argument is None:
                return None
            return seq_type.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra)

    return field(type=field_type, factory=factory, mandatory=True,
                 invariant=invariant, initial=factory(initial))
|
||||
|
||||
|
||||
def pset_field(item_type, optional=False, initial=(),
               invariant=PFIELD_NO_INVARIANT,
               item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PSet`` field.

    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.

    :return: A ``field`` containing a ``CheckedPSet`` of the given type.
    """
    # Thin wrapper: all the work happens in _sequence_field.
    return _sequence_field(
        CheckedPSet, item_type, optional, initial,
        invariant=invariant, item_invariant=item_invariant)
|
||||
|
||||
|
||||
def pvector_field(item_type, optional=False, initial=(),
                  invariant=PFIELD_NO_INVARIANT,
                  item_invariant=PFIELD_NO_INVARIANT):
    """
    Create checked ``PVector`` field.

    :param item_type: The required type for the items in the vector.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.

    :return: A ``field`` containing a ``CheckedPVector`` of the given type.
    """
    # Thin wrapper: all the work happens in _sequence_field.
    return _sequence_field(
        CheckedPVector, item_type, optional, initial,
        invariant=invariant, item_invariant=item_invariant)
|
||||
|
||||
|
||||
# Invariant that accepts any item.
_valid = lambda item: (True, "")


# Global dictionary to hold auto-generated field types: used for unpickling
_pmap_field_types = {}
|
||||
|
||||
def _restore_pmap_field_pickle(key_type, value_type, data):
    """Unpickling function for auto-generated PMap field types."""
    generated_type = _pmap_field_types[key_type, value_type]
    return _restore_pickle(generated_type, data)
|
||||
|
||||
def _make_pmap_field_type(key_type, value_type):
    """Create (or fetch the cached) CheckedPMap subclass for the given key/value types."""
    cached = _pmap_field_types.get((key_type, value_type))
    if cached is not None:
        return cached

    class GeneratedMapType(CheckedPMap):
        __key_type__ = key_type
        __value_type__ = value_type

        def __reduce__(self):
            # Pickle via the module-level registry rather than the
            # dynamically created class, which pickle cannot locate.
            return (_restore_pmap_field_pickle,
                    (self.__key_type__, self.__value_type__, dict(self)))

    GeneratedMapType.__name__ = "{0}To{1}PMap".format(
        _types_to_names(GeneratedMapType._checked_key_types),
        _types_to_names(GeneratedMapType._checked_value_types))
    _pmap_field_types[key_type, value_type] = GeneratedMapType
    return GeneratedMapType
|
||||
|
||||
|
||||
def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT):
    """
    Create a checked ``PMap`` field.

    :param key: The required type for the keys of the map.
    :param value: The required type for the values of the map.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param invariant: Pass-through to ``field``.

    :return: A ``field`` containing a ``CheckedPMap``.
    """
    map_type = _make_pmap_field_type(key_type, value_type)

    if not optional:
        factory = map_type.create
    else:
        def factory(argument):
            # Optional fields let None pass straight through.
            if argument is None:
                return None
            return map_type.create(argument)

    return field(mandatory=True, initial=map_type(),
                 type=optional_type(map_type) if optional else map_type,
                 factory=factory, invariant=invariant)
|
97
.venv/Lib/site-packages/pyrsistent/_helpers.py
Normal file
97
.venv/Lib/site-packages/pyrsistent/_helpers.py
Normal file
@ -0,0 +1,97 @@
|
||||
from functools import wraps
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
from pyrsistent._pset import PSet, pset
|
||||
from pyrsistent._pvector import PVector, pvector
|
||||
|
||||
def freeze(o, strict=True):
    """
    Recursively convert simple Python containers into pyrsistent versions
    of those containers.

    - list is converted to pvector, recursively
    - dict is converted to pmap, recursively on values (but not keys)
    - set is converted to pset, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (default):

    - freeze is called on elements of pvectors
    - freeze is called on values of pmaps

    Sets and dict keys are not recursively frozen because they do not contain
    mutable data by convention. The main exception to this rule is that
    dict keys and set elements are often instances of mutable objects that
    support hash-by-id, which this function can't convert anyway.

    >>> freeze(set([1, 2]))
    pset([1, 2])
    >>> freeze([1, {'a': 3}])
    pvector([1, pmap({'a': 3})])
    >>> freeze((1, []))
    (1, pvector([]))
    """
    kind = type(o)
    if kind is dict or (strict and isinstance(o, PMap)):
        return pmap({key: freeze(value, strict) for key, value in o.items()})
    if kind is list or (strict and isinstance(o, PVector)):
        return pvector(freeze(item, strict) for item in o)
    if kind is tuple:
        return tuple(freeze(item, strict) for item in o)
    if kind is set:
        # impossible to have anything that needs freezing inside a set or pset
        return pset(o)
    return o
|
||||
|
||||
|
||||
def thaw(o, strict=True):
    """
    Recursively convert pyrsistent containers into simple Python containers.

    - pvector is converted to list, recursively
    - pmap is converted to dict, recursively on values (but not keys)
    - pset is converted to set, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (the default):

    - thaw is called on elements of lists
    - thaw is called on values in dicts

    >>> from pyrsistent import s, m, v
    >>> thaw(s(1, 2))
    {1, 2}
    >>> thaw(v(1, m(a=3)))
    [1, {'a': 3}]
    >>> thaw((1, v()))
    (1, [])
    """
    kind = type(o)
    if isinstance(o, PVector) or (strict and kind is list):
        return [thaw(item, strict) for item in o]
    if isinstance(o, PMap) or (strict and kind is dict):
        return {key: thaw(value, strict) for key, value in o.items()}
    if kind is tuple:
        return tuple(thaw(item, strict) for item in o)
    if isinstance(o, PSet):
        # impossible to thaw inside psets or sets
        return set(o)
    return o
|
||||
|
||||
|
||||
def mutant(fn):
    """
    Convenience decorator to isolate mutation to within the decorated function (with respect
    to the input arguments).

    All arguments to the decorated function will be frozen so that they are guaranteed not to change.
    The return value is also frozen.
    """
    @wraps(fn)
    def inner_f(*args, **kwargs):
        frozen_args = [freeze(arg) for arg in args]
        frozen_kwargs = dict(freeze(pair) for pair in kwargs.items())
        return freeze(fn(*frozen_args, **frozen_kwargs))

    return inner_f
|
103
.venv/Lib/site-packages/pyrsistent/_immutable.py
Normal file
103
.venv/Lib/site-packages/pyrsistent/_immutable.py
Normal file
@ -0,0 +1,103 @@
|
||||
import sys
|
||||
|
||||
|
||||
def immutable(members='', name='Immutable', verbose=False):
    """
    Produces a class that either can be used standalone or as a base class for persistent classes.

    This is a thin wrapper around a named tuple.

    Constructing a type and using it to instantiate objects:

    >>> Point = immutable('x, y', name='Point')
    >>> p = Point(1, 2)
    >>> p2 = p.set(x=3)
    >>> p
    Point(x=1, y=2)
    >>> p2
    Point(x=3, y=2)

    Inheriting from a constructed type. In this case no type name needs to be supplied:

    >>> class PositivePoint(immutable('x, y')):
    ...     __slots__ = tuple()
    ...     def __new__(cls, x, y):
    ...         if x > 0 and y > 0:
    ...             return super(PositivePoint, cls).__new__(cls, x, y)
    ...         raise Exception('Coordinates must be positive!')
    ...
    >>> p = PositivePoint(1, 2)
    >>> p.set(x=3)
    PositivePoint(x=3, y=2)
    >>> p.set(y=-3)
    Traceback (most recent call last):
    Exception: Coordinates must be positive!

    The persistent class also supports the notion of frozen members. The value of a frozen member
    cannot be updated. For example it could be used to implement an ID that should remain the same
    over time. A frozen member is denoted by a trailing underscore.

    >>> Point = immutable('x, y, id_', name='Point')
    >>> p = Point(1, 2, id_=17)
    >>> p.set(x=3)
    Point(x=3, y=2, id_=17)
    >>> p.set(id_=18)
    Traceback (most recent call last):
    AttributeError: Cannot set frozen members id_
    """
    from collections import namedtuple

    # Accept either a comma/space separated string or an iterable of names.
    if isinstance(members, str):
        members = members.replace(',', ' ').split()

    def _frozen_guard():
        # Source snippet rejecting assignment to trailing-underscore members;
        # empty when the class has none.
        frozen_members = ["'%s'" % f for f in members if f.endswith('_')]
        if not frozen_members:
            return ''

        return """
        frozen_fields = fields_to_modify & set([{frozen_members}])
        if frozen_fields:
            raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))
        """.format(frozen_members=', '.join(frozen_members))

    # Verbose is no longer supported in Python 3.7
    if sys.version_info < (3, 7):
        verbose_argument = ", verbose={verbose}".format(verbose=verbose)
    else:
        verbose_argument = ""

    quoted_members = ', '.join("'%s'" % m for m in members)
    template = """
class {class_name}(namedtuple('ImmutableBase', [{quoted_members}]{verbose_string})):
    __slots__ = tuple()

    def __repr__(self):
        return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__)

    def set(self, **kwargs):
        if not kwargs:
            return self

        fields_to_modify = set(kwargs.keys())
        if not fields_to_modify <= {member_set}:
            raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set}))

        {frozen_member_test}

        return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))
""".format(quoted_members=quoted_members,
           member_set="set([%s])" % quoted_members if quoted_members else 'set()',
           frozen_member_test=_frozen_guard(),
           verbose_string=verbose_argument,
           class_name=name)

    if verbose:
        print(template)

    env = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
    try:
        exec(template, env)
    except SyntaxError as e:
        # Include the generated source to make debugging feasible.
        raise SyntaxError(str(e) + ':\n' + template) from e

    return env[name]
|
267
.venv/Lib/site-packages/pyrsistent/_pbag.py
Normal file
267
.venv/Lib/site-packages/pyrsistent/_pbag.py
Normal file
@ -0,0 +1,267 @@
|
||||
from collections.abc import Container, Iterable, Sized, Hashable
|
||||
from functools import reduce
|
||||
from pyrsistent._pmap import pmap
|
||||
|
||||
|
||||
def _add_to_counters(counters, element):
    """Return a new counts map with element's multiplicity incremented (from 0 if absent)."""
    current = counters.get(element, 0)
    return counters.set(element, current + 1)
|
||||
|
||||
|
||||
class PBag(object):
    """
    A persistent bag/multiset type.

    Requires elements to be hashable, and allows duplicates, but has no
    ordering. Bags are hashable.

    Do not instantiate directly, instead use the factory functions :py:func:`b`
    or :py:func:`pbag` to create an instance.

    Some examples:

    >>> s = pbag([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(1)
    >>> s
    pbag([1, 1, 2, 3])
    >>> s2
    pbag([1, 1, 2, 3, 4])
    >>> s3
    pbag([1, 2, 3, 4])
    """

    __slots__ = ('_counts', '__weakref__')

    def __init__(self, counts):
        # counts: persistent map of element -> multiplicity.
        self._counts = counts

    def add(self, element):
        """
        Add an element to the bag.

        >>> s = pbag([1])
        >>> s2 = s.add(1)
        >>> s3 = s.add(2)
        >>> s2
        pbag([1, 1])
        >>> s3
        pbag([1, 2])
        """
        return PBag(_add_to_counters(self._counts, element))

    def update(self, iterable):
        """
        Update bag with all elements in iterable.

        >>> s = pbag([1])
        >>> s.update([1, 2])
        pbag([1, 1, 2])
        """
        if iterable:
            return PBag(reduce(_add_to_counters, iterable, self._counts))

        # Empty/falsy iterable: nothing to add, reuse this instance.
        return self

    def remove(self, element):
        """
        Remove an element from the bag.

        >>> s = pbag([1, 1, 2])
        >>> s2 = s.remove(1)
        >>> s3 = s.remove(2)
        >>> s2
        pbag([1, 2])
        >>> s3
        pbag([1, 1])
        """
        if element not in self._counts:
            raise KeyError(element)
        elif self._counts[element] == 1:
            # Last occurrence: drop the key entirely so counts stay >= 1.
            newc = self._counts.remove(element)
        else:
            newc = self._counts.set(element, self._counts[element] - 1)
        return PBag(newc)

    def count(self, element):
        """
        Return the number of times an element appears.


        >>> pbag([]).count('non-existent')
        0
        >>> pbag([1, 1, 2]).count(1)
        2
        """
        return self._counts.get(element, 0)

    def __len__(self):
        """
        Return the length including duplicates.

        >>> len(pbag([1, 1, 2]))
        3
        """
        return sum(self._counts.itervalues())

    def __iter__(self):
        """
        Return an iterator of all elements, including duplicates.

        >>> list(pbag([1, 1, 2]))
        [1, 1, 2]
        >>> list(pbag([1, 2]))
        [1, 2]
        """
        for elt, count in self._counts.iteritems():
            for i in range(count):
                yield elt

    def __contains__(self, elt):
        """
        Check if an element is in the bag.

        >>> 1 in pbag([1, 1, 2])
        True
        >>> 0 in pbag([1, 2])
        False
        """
        return elt in self._counts

    def __repr__(self):
        return "pbag({0})".format(list(self))

    def __eq__(self, other):
        """
        Check if two bags are equivalent, honoring the number of duplicates,
        and ignoring insertion order.

        >>> pbag([1, 1, 2]) == pbag([1, 2])
        False
        >>> pbag([2, 1, 0]) == pbag([0, 1, 2])
        True
        """
        if type(other) is not PBag:
            raise TypeError("Can only compare PBag with PBags")
        return self._counts == other._counts

    def __lt__(self, other):
        raise TypeError('PBags are not orderable')

    # Bags have no total order; all rich comparisons raise.
    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__

    # Multiset-style operations similar to collections.Counter

    def __add__(self, other):
        """
        Combine elements from two PBags.

        >>> pbag([1, 2, 2]) + pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            result[elem] = self.count(elem) + other_count
        return PBag(result.persistent())

    def __sub__(self, other):
        """
        Remove elements from one PBag that are present in another.

        >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4])
        pbag([1, 2, 2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            newcount = self.count(elem) - other_count
            if newcount > 0:
                result[elem] = newcount
            elif elem in self:
                # Count dropped to zero or below: remove the key entirely.
                result.remove(elem)
        return PBag(result.persistent())

    def __or__(self, other):
        """
        Union: Keep elements that are present in either of two PBags.

        >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            count = self.count(elem)
            # Union of multisets takes the maximum multiplicity.
            newcount = max(count, other_count)
            result[elem] = newcount
        return PBag(result.persistent())

    def __and__(self, other):
        """
        Intersection: Only keep elements that are present in both PBags.

        >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3])
        pbag([2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = pmap().evolver()
        for elem, count in self._counts.iteritems():
            # Intersection of multisets takes the minimum multiplicity.
            newcount = min(count, other.count(elem))
            if newcount > 0:
                result[elem] = newcount
        return PBag(result.persistent())

    def __hash__(self):
        """
        Hash based on value of elements.

        >>> m = pmap({pbag([1, 2]): "it's here!"})
        >>> m[pbag([2, 1])]
        "it's here!"
        >>> pbag([1, 1, 2]) in m
        False
        """
        return hash(self._counts)
|
||||
|
||||
|
||||
# Register PBag with the collections ABCs so isinstance checks succeed
# without actual inheritance.
Container.register(PBag)
Iterable.register(PBag)
Sized.register(PBag)
Hashable.register(PBag)
|
||||
|
||||
|
||||
def b(*elements):
    """
    Construct a persistent bag.

    Takes an arbitrary number of arguments to insert into the new persistent
    bag.

    >>> b(1, 2, 3, 2)
    pbag([1, 2, 2, 3])
    """
    # Varargs convenience wrapper around pbag().
    return pbag(elements)
|
||||
|
||||
|
||||
def pbag(elements):
    """
    Convert an iterable to a persistent bag.

    Takes an iterable with elements to insert.

    >>> pbag([1, 2, 3, 2])
    pbag([1, 2, 2, 3])
    """
    if not elements:
        # Share a single empty instance.
        return _EMPTY_PBAG
    counts = reduce(_add_to_counters, elements, pmap())
    return PBag(counts)
|
||||
|
||||
|
||||
# Shared singleton returned by pbag() for empty input.
_EMPTY_PBAG = PBag(pmap())
|
||||
|
262
.venv/Lib/site-packages/pyrsistent/_pclass.py
Normal file
262
.venv/Lib/site-packages/pyrsistent/_pclass.py
Normal file
@ -0,0 +1,262 @@
|
||||
from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
|
||||
from pyrsistent._field_common import (
|
||||
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
|
||||
)
|
||||
from pyrsistent._transformations import transform
|
||||
|
||||
|
||||
def _is_pclass(bases):
|
||||
return len(bases) == 1 and bases[0] == CheckedType
|
||||
|
||||
|
||||
class PClassMeta(type):
    """Metaclass for PClass: collects field specs and invariants from the
    class body and bases, and builds a matching __slots__."""

    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_pclass_fields')
        store_invariants(dct, bases, '_pclass_invariants', '__invariant__')

        slots = ['_pclass_frozen']
        slots.extend(dct['_pclass_fields'])

        # There must only be one __weakref__ entry in the inheritance hierarchy,
        # lets put it on the top level class.
        if _is_pclass(bases):
            slots.append('__weakref__')

        dct['__slots__'] = tuple(slots)

        return super(PClassMeta, mcs).__new__(mcs, name, bases, dct)
|
||||
|
||||
# Sentinel distinguishing "attribute never set" from a stored None.
_MISSING_VALUE = object()
|
||||
|
||||
|
||||
def _check_and_set_attr(cls, field, name, value, result, invariant_errors):
    """Type-check *value* for *field*, then either set it on *result* or
    append the invariant's error code to *invariant_errors*."""
    check_type(cls, field, name, value)
    is_ok, error_code = field.invariant(value)
    if is_ok:
        setattr(result, name, value)
    else:
        invariant_errors.append(error_code)
|
||||
|
||||
|
||||
class PClass(CheckedType, metaclass=PClassMeta):
|
||||
"""
|
||||
A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
|
||||
from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
|
||||
is not a PMap and hence not a collection but rather a plain Python object.
|
||||
|
||||
|
||||
More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent
|
||||
"""
|
||||
def __new__(cls, **kwargs): # Support *args?
|
||||
result = super(PClass, cls).__new__(cls)
|
||||
factory_fields = kwargs.pop('_factory_fields', None)
|
||||
ignore_extra = kwargs.pop('ignore_extra', None)
|
||||
missing_fields = []
|
||||
invariant_errors = []
|
||||
for name, field in cls._pclass_fields.items():
|
||||
if name in kwargs:
|
||||
if factory_fields is None or name in factory_fields:
|
||||
if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
|
||||
value = field.factory(kwargs[name], ignore_extra=ignore_extra)
|
||||
else:
|
||||
value = field.factory(kwargs[name])
|
||||
else:
|
||||
value = kwargs[name]
|
||||
_check_and_set_attr(cls, field, name, value, result, invariant_errors)
|
||||
del kwargs[name]
|
||||
elif field.initial is not PFIELD_NO_INITIAL:
|
||||
initial = field.initial() if callable(field.initial) else field.initial
|
||||
_check_and_set_attr(
|
||||
cls, field, name, initial, result, invariant_errors)
|
||||
elif field.mandatory:
|
||||
missing_fields.append('{0}.{1}'.format(cls.__name__, name))
|
||||
|
||||
if invariant_errors or missing_fields:
|
||||
raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')
|
||||
|
||||
if kwargs:
|
||||
raise AttributeError("'{0}' are not among the specified fields for {1}".format(
|
||||
', '.join(kwargs), cls.__name__))
|
||||
|
||||
check_global_invariants(result, cls._pclass_invariants)
|
||||
|
||||
result._pclass_frozen = True
|
||||
return result
|
||||
|
||||
def set(self, *args, **kwargs):
|
||||
"""
|
||||
Set a field in the instance. Returns a new instance with the updated value. The original instance remains
|
||||
unmodified. Accepts key-value pairs or single string representing the field name and a value.
|
||||
|
||||
>>> from pyrsistent import PClass, field
|
||||
>>> class AClass(PClass):
|
||||
... x = field()
|
||||
...
|
||||
>>> a = AClass(x=1)
|
||||
>>> a2 = a.set(x=2)
|
||||
>>> a3 = a.set('x', 3)
|
||||
>>> a
|
||||
AClass(x=1)
|
||||
>>> a2
|
||||
AClass(x=2)
|
||||
>>> a3
|
||||
AClass(x=3)
|
||||
"""
|
||||
if args:
|
||||
kwargs[args[0]] = args[1]
|
||||
|
||||
factory_fields = set(kwargs)
|
||||
|
||||
for key in self._pclass_fields:
|
||||
if key not in kwargs:
|
||||
value = getattr(self, key, _MISSING_VALUE)
|
||||
if value is not _MISSING_VALUE:
|
||||
kwargs[key] = value
|
||||
|
||||
return self.__class__(_factory_fields=factory_fields, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
|
||||
"""
|
||||
Factory method. Will create a new PClass of the current type and assign the values
|
||||
specified in kwargs.
|
||||
|
||||
:param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
|
||||
in the set of fields on the PClass.
|
||||
"""
|
||||
if isinstance(kwargs, cls):
|
||||
return kwargs
|
||||
|
||||
if ignore_extra:
|
||||
kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}
|
||||
|
||||
return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)
|
||||
|
||||
def serialize(self, format=None):
|
||||
"""
|
||||
Serialize the current PClass using custom serializer functions for fields where
|
||||
such have been supplied.
|
||||
"""
|
||||
result = {}
|
||||
for name in self._pclass_fields:
|
||||
value = getattr(self, name, _MISSING_VALUE)
|
||||
if value is not _MISSING_VALUE:
|
||||
result[name] = serialize(self._pclass_fields[name].serializer, format, value)
|
||||
|
||||
return result
|
||||
|
||||
def transform(self, *transformations):
|
||||
"""
|
||||
Apply transformations to the currency PClass. For more details on transformations see
|
||||
the documentation for PMap. Transformations on PClasses do not support key matching
|
||||
since the PClass is not a collection. Apart from that the transformations available
|
||||
for other persistent types work as expected.
|
||||
"""
|
||||
return transform(self, transformations)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, self.__class__):
|
||||
for name in self._pclass_fields:
|
||||
if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
    # May want to optimize this by caching the hash somehow
    # Hash over (name, value) pairs so instances that compare equal under
    # __eq__ (same fields, same values) hash equal as well.
    return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))
def __setattr__(self, key, value):
    """Reject attribute assignment once the instance has been frozen."""
    # _pclass_frozen is presumably set at the end of construction (not visible
    # in this chunk — confirm in __init__); before that, assignment is allowed
    # so the constructor can populate the fields.
    if getattr(self, '_pclass_frozen', False):
        raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value))

    super(PClass, self).__setattr__(key, value)
def __delattr__(self, key):
    """Deletion is never allowed on an immutable PClass; use remove() instead."""
    raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key))
def _to_dict(self):
    """Return a plain dict of all fields that currently hold a value."""
    candidates = ((field_name, getattr(self, field_name, _MISSING_VALUE))
                  for field_name in self._pclass_fields)
    # Fields without a value (sentinel) are left out entirely.
    return {field_name: value for field_name, value in candidates if value is not _MISSING_VALUE}
def __repr__(self):
    """Readable representation of the form ClassName(field=value, ...)."""
    return "{0}({1})".format(self.__class__.__name__,
                             ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items()))
def __reduce__(self):
    # Pickling support
    # Only fields that actually have a value are pickled; _restore_pickle
    # rebuilds the instance from the class and this dict.
    data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))
    return _restore_pickle, (self.__class__, data,)
def evolver(self):
    """
    Returns an evolver for this object.

    The evolver starts from a snapshot of the current field values and
    produces a new PClass via persistent().
    """
    return _PClassEvolver(self, self._to_dict())
def remove(self, name):
    """
    Remove attribute given by name from the current instance. Raises AttributeError if the
    attribute doesn't exist.
    """
    # Delegates to the evolver, whose __delitem__/remove raises AttributeError
    # for a missing field.
    evolver = self.evolver()
    del evolver[name]
    return evolver.persistent()
|
||||
class _PClassEvolver(object):
    """
    Mutable builder that batches changes to a PClass and materializes a new
    instance with persistent(). Returns the original instance unchanged when
    no modification was made.
    """
    __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields')

    def __init__(self, original, initial_dict):
        # original: the PClass to fall back to when nothing changed.
        # initial_dict: working copy of the field values (mutated in place).
        self._pclass_evolver_original = original
        self._pclass_evolver_data = initial_dict
        self._pclass_evolver_data_is_dirty = False
        # Fields assigned through the evolver; forwarded to the constructor as
        # _factory_fields — presumably so their factories are not re-run; confirm
        # against the PClass constructor (not visible in this chunk).
        self._factory_fields = set()

    def __getitem__(self, item):
        return self._pclass_evolver_data[item]

    def set(self, key, value):
        # Identity (not equality) comparison: assigning an equal-but-distinct
        # object still counts as a change.
        if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value:
            self._pclass_evolver_data[key] = value
            self._factory_fields.add(key)
            self._pclass_evolver_data_is_dirty = True

        return self

    def __setitem__(self, key, value):
        self.set(key, value)

    def remove(self, item):
        """Remove field item; raises AttributeError when it is not present."""
        if item in self._pclass_evolver_data:
            del self._pclass_evolver_data[item]
            self._factory_fields.discard(item)
            self._pclass_evolver_data_is_dirty = True
            return self

        raise AttributeError(item)

    def __delitem__(self, item):
        self.remove(item)

    def persistent(self):
        """Return a PClass reflecting all accumulated changes (or the original)."""
        if self._pclass_evolver_data_is_dirty:
            return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields,
                                                           **self._pclass_evolver_data)

        return self._pclass_evolver_original

    def __setattr__(self, key, value):
        # Slot names are real attributes of the evolver itself; any other name
        # is treated as a field assignment (evolver.x = 1 == evolver.set('x', 1)).
        if key not in self.__slots__:
            self.set(key, value)
        else:
            super(_PClassEvolver, self).__setattr__(key, value)

    def __getattr__(self, item):
        # Fall back to field lookup. NB: a missing field raises KeyError here,
        # not AttributeError.
        return self[item]
376
.venv/Lib/site-packages/pyrsistent/_pdeque.py
Normal file
376
.venv/Lib/site-packages/pyrsistent/_pdeque.py
Normal file
@ -0,0 +1,376 @@
|
||||
from collections.abc import Sequence, Hashable
|
||||
from itertools import islice, chain
|
||||
from numbers import Integral
|
||||
from pyrsistent._plist import plist
|
||||
|
||||
|
||||
class PDeque(object):
    """
    Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented
    using two persistent lists.

    A maximum length can be specified to create a bounded queue.

    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.

    Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to
    create an instance.

    Some examples:

    >>> x = pdeque([1, 2, 3])
    >>> x.left
    1
    >>> x.right
    3
    >>> x[0] == x.left
    True
    >>> x[-1] == x.right
    True
    >>> x.pop()
    pdeque([1, 2])
    >>> x.pop() == x[:-1]
    True
    >>> x.popleft()
    pdeque([2, 3])
    >>> x.append(4)
    pdeque([1, 2, 3, 4])
    >>> x.appendleft(4)
    pdeque([4, 1, 2, 3])

    >>> y = pdeque([1, 2, 3], maxlen=3)
    >>> y.append(4)
    pdeque([2, 3, 4], maxlen=3)
    >>> y.appendleft(4)
    pdeque([4, 1, 2], maxlen=3)
    """
    # _left_list stores the left half front-first; _right_list stores the right
    # half back-first, so both ends of the deque are heads of plists (O(1)).
    # _length caches the element count; _maxlen is None for unbounded deques.
    __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')

    def __new__(cls, left_list, right_list, length, maxlen=None):
        instance = super(PDeque, cls).__new__(cls)
        instance._left_list = left_list
        instance._right_list = right_list
        instance._length = length

        if maxlen is not None:
            if not isinstance(maxlen, Integral):
                raise TypeError('An integer is required as maxlen')

            if maxlen < 0:
                raise ValueError("maxlen must be non-negative")

        instance._maxlen = maxlen
        return instance

    @property
    def right(self):
        """
        Rightmost element in deque.
        """
        return PDeque._tip_from_lists(self._right_list, self._left_list)

    @property
    def left(self):
        """
        Leftmost element in deque.
        """
        return PDeque._tip_from_lists(self._left_list, self._right_list)

    @staticmethod
    def _tip_from_lists(primary_list, secondary_list):
        # The end element is the head of its own list, or — if that list is
        # empty — the last element of the other list.
        if primary_list:
            return primary_list.first

        if secondary_list:
            return secondary_list[-1]

        raise IndexError('No elements in empty deque')

    def __iter__(self):
        # Left half is stored in order, right half reversed.
        return chain(self._left_list, self._right_list.reverse())

    def __repr__(self):
        return "pdeque({0}{1})".format(list(self),
                                       ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
    __str__ = __repr__

    @property
    def maxlen(self):
        """
        Maximum length of the queue.
        """
        return self._maxlen

    def pop(self, count=1):
        """
        Return new deque with rightmost element removed. Popping the empty queue
        will return the empty queue. An optional count can be given to indicate the
        number of elements to pop. Popping with a negative index is the same as
        popleft. Executes in amortized O(k) where k is the number of elements to pop.

        >>> pdeque([1, 2]).pop()
        pdeque([1])
        >>> pdeque([1, 2]).pop(2)
        pdeque([])
        >>> pdeque([1, 2]).pop(-1)
        pdeque([2])
        """
        if count < 0:
            return self.popleft(-count)

        new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
        return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)

    def popleft(self, count=1):
        """
        Return new deque with leftmost element removed. Otherwise functionally
        equivalent to pop().

        >>> pdeque([1, 2]).popleft()
        pdeque([2])
        """
        if count < 0:
            return self.pop(-count)

        new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count)
        return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)

    @staticmethod
    def _pop_lists(primary_list, secondary_list, count):
        # Drop count elements from the head of primary_list, refilling it from
        # the reversed secondary_list when it runs dry.
        new_primary_list = primary_list
        new_secondary_list = secondary_list

        while count > 0 and (new_primary_list or new_secondary_list):
            count -= 1
            if new_primary_list.rest:
                new_primary_list = new_primary_list.rest
            elif new_primary_list:
                # Exactly one element left in primary: replace it with the
                # whole reversed secondary list.
                new_primary_list = new_secondary_list.reverse()
                new_secondary_list = plist()
            else:
                # Primary already empty: drop directly from reversed secondary.
                new_primary_list = new_secondary_list.reverse().rest
                new_secondary_list = plist()

        return new_primary_list, new_secondary_list

    def _is_empty(self):
        return not self._left_list and not self._right_list

    def __lt__(self, other):
        if not isinstance(other, PDeque):
            return NotImplemented

        return tuple(self) < tuple(other)

    def __eq__(self, other):
        if not isinstance(other, PDeque):
            return NotImplemented

        if tuple(self) == tuple(other):
            # Sanity check of the length value since it is redundant (there for performance)
            assert len(self) == len(other)
            return True

        return False

    def __hash__(self):
        return hash(tuple(self))

    def __len__(self):
        return self._length

    def append(self, elem):
        """
        Return new deque with elem as the rightmost element.

        >>> pdeque([1, 2]).append(3)
        pdeque([1, 2, 3])
        """
        new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem)
        return PDeque(new_left_list, new_right_list, new_length, self._maxlen)

    def appendleft(self, elem):
        """
        Return new deque with elem as the leftmost element.

        >>> pdeque([1, 2]).appendleft(3)
        pdeque([3, 1, 2])
        """
        new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem)
        return PDeque(new_left_list, new_right_list, new_length, self._maxlen)

    def _append(self, primary_list, secondary_list, elem):
        # Bounded and full: drop one element from the opposite end first.
        if self._maxlen is not None and self._length == self._maxlen:
            if self._maxlen == 0:
                return primary_list, secondary_list, 0
            new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1)
            return new_primary_list, new_secondary_list.cons(elem), self._length

        return primary_list, secondary_list.cons(elem), self._length + 1

    @staticmethod
    def _extend_list(the_list, iterable):
        # Cons each element onto the head and count how many were added.
        count = 0
        for elem in iterable:
            the_list = the_list.cons(elem)
            count += 1

        return the_list, count

    def _extend(self, primary_list, secondary_list, iterable):
        new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable)
        new_secondary_list = secondary_list
        current_len = self._length + extend_count
        if self._maxlen is not None and current_len > self._maxlen:
            # Trim the overflow from the opposite end to honor the bound.
            pop_len = current_len - self._maxlen
            new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len)
            extend_count -= pop_len

        return new_primary_list, new_secondary_list, extend_count

    def extend(self, iterable):
        """
        Return new deque with all elements of iterable appended to the right.

        >>> pdeque([1, 2]).extend([3, 4])
        pdeque([1, 2, 3, 4])
        """
        new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable)
        return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)

    def extendleft(self, iterable):
        """
        Return new deque with all elements of iterable appended to the left.

        NB! The elements will be inserted in reverse order compared to the order in the iterable.

        >>> pdeque([1, 2]).extendleft([3, 4])
        pdeque([4, 3, 1, 2])
        """
        new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable)
        return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)

    def count(self, elem):
        """
        Return the number of elements equal to elem present in the queue

        >>> pdeque([1, 2, 1]).count(1)
        2
        """
        return self._left_list.count(elem) + self._right_list.count(elem)

    def remove(self, elem):
        """
        Return new deque with first element from left equal to elem removed. If no such element is found
        a ValueError is raised.

        >>> pdeque([2, 1, 2]).remove(2)
        pdeque([1, 2])
        """
        try:
            # NOTE(review): the resulting deque is built without self._maxlen,
            # so the bound is dropped by remove() — confirm whether intentional.
            return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1)
        except ValueError:
            # Value not found in left list, try the right list
            try:
                # This is severely inefficient with a double reverse, should perhaps implement a remove_last()?
                return PDeque(self._left_list,
                              self._right_list.reverse().remove(elem).reverse(), self._length - 1)
            except ValueError as e:
                raise ValueError('{0} not found in PDeque'.format(elem)) from e

    def reverse(self):
        """
        Return reversed deque.

        >>> pdeque([1, 2, 3]).reverse()
        pdeque([3, 2, 1])

        Also supports the standard python reverse function.

        >>> reversed(pdeque([1, 2, 3]))
        pdeque([3, 2, 1])
        """
        # Swapping the two lists reverses the deque in O(1).
        # NOTE(review): _maxlen is not propagated here either — confirm.
        return PDeque(self._right_list, self._left_list, self._length)
    __reversed__ = reverse

    def rotate(self, steps):
        """
        Return deque with elements rotated steps steps.

        >>> x = pdeque([1, 2, 3])
        >>> x.rotate(1)
        pdeque([3, 1, 2])
        >>> x.rotate(-2)
        pdeque([3, 1, 2])
        """
        # Pop the rotated elements off one end, then re-insert them at the other.
        popped_deque = self.pop(steps)
        if steps >= 0:
            return popped_deque.extendleft(islice(self.reverse(), steps))

        return popped_deque.extend(islice(self, -steps))

    def __reduce__(self):
        # Pickling support
        return pdeque, (list(self), self._maxlen)

    def __getitem__(self, index):
        if isinstance(index, slice):
            if index.step is not None and index.step != 1:
                # Too difficult, no structural sharing possible
                return pdeque(tuple(self)[index], maxlen=self._maxlen)

            # Step-1 slices are expressed as popleft (drop the prefix) plus
            # pop (drop the suffix) to reuse structure.
            result = self
            if index.start is not None:
                result = result.popleft(index.start % self._length)
            if index.stop is not None:
                result = result.pop(self._length - (index.stop % self._length))

            return result

        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

        if index >= 0:
            return self.popleft(index).left

        # Negative index: translate to an offset from the left end.
        shifted = len(self) + index
        if shifted < 0:
            raise IndexError(
                "pdeque index {0} out of range {1}".format(index, len(self)),
            )
        return self.popleft(shifted).left

    # Borrow the generic O(n) implementation from the Sequence ABC.
    index = Sequence.index
|
||||
# Register PDeque as a virtual subclass so isinstance/issubclass checks
# against the Sequence and Hashable ABCs succeed.
Sequence.register(PDeque)
Hashable.register(PDeque)
|
||||
|
||||
def pdeque(iterable=(), maxlen=None):
    """
    Return deque containing the elements of iterable. If maxlen is specified then
    len(iterable) - maxlen elements are discarded from the left if len(iterable) > maxlen.

    :param iterable: elements for the new deque, leftmost first.
    :param maxlen: optional non-negative bound on the deque's length.

    >>> pdeque([1, 2, 3])
    pdeque([1, 2, 3])
    >>> pdeque([1, 2, 3, 4], maxlen=2)
    pdeque([3, 4], maxlen=2)
    """
    t = tuple(iterable)
    if maxlen is not None:
        # Bug fix: t[-maxlen:] is wrong for maxlen == 0 since t[-0:] is the
        # whole tuple — a bounded deque with maxlen 0 must always be empty.
        # Negative maxlen still falls through to PDeque, which raises ValueError.
        t = t[-maxlen:] if maxlen > 0 else ()
    length = len(t)
    pivot = length // 2
    # Split roughly in half; the right half is stored reversed so that both
    # ends of the deque are the heads of their respective lists.
    left = plist(t[:pivot])
    right = plist(t[pivot:], reverse=True)
    return PDeque(left, right, length, maxlen)
|
||||
def dq(*elements):
    """
    Return deque containing all arguments.

    >>> dq(1, 2, 3)
    pdeque([1, 2, 3])
    """
    # Thin convenience wrapper over the iterable-based factory.
    return pdeque(iterable=elements)
|
313
.venv/Lib/site-packages/pyrsistent/_plist.py
Normal file
313
.venv/Lib/site-packages/pyrsistent/_plist.py
Normal file
@ -0,0 +1,313 @@
|
||||
from collections.abc import Sequence, Hashable
|
||||
from numbers import Integral
|
||||
from functools import reduce
|
||||
|
||||
|
||||
class _PListBuilder(object):
    """
    Helper class to allow construction of a list without
    having to reverse it in the end.
    """
    # _head is the first node of the list being built, _tail the last; new
    # elements are attached by mutating _tail.rest. This mutation is safe only
    # because the nodes are not yet visible outside the builder.
    __slots__ = ('_head', '_tail')

    def __init__(self):
        self._head = _EMPTY_PLIST
        self._tail = _EMPTY_PLIST

    def _append(self, elem, constructor):
        if not self._tail:
            # First element: head and tail are the same node.
            self._head = constructor(elem)
            self._tail = self._head
        else:
            # NB: mutates the rest pointer of the (not yet published) tail node.
            self._tail.rest = constructor(elem)
            self._tail = self._tail.rest

        return self._head

    def append_elem(self, elem):
        """Append a single element wrapped in a fresh PList node."""
        return self._append(elem, lambda e: PList(e, _EMPTY_PLIST))

    def append_plist(self, pl):
        """Splice an existing plist onto the end (shared, not copied)."""
        return self._append(pl, lambda l: l)

    def build(self):
        """Return the list built so far (empty list if nothing was appended)."""
        return self._head
|
||||
class _PListBase(object):
    """Shared implementation for PList nodes and the empty-list singleton."""
    __slots__ = ('__weakref__',)

    # Selected implementations can be taken straight from the Sequence
    # class, other are less suitable. Especially those that work with
    # index lookups.
    count = Sequence.count
    index = Sequence.index

    def __reduce__(self):
        # Pickling support
        return plist, (list(self),)

    def __len__(self):
        """
        Return the length of the list, computed by traversing it.

        This is obviously O(n) but with the current implementation
        where a list is also a node the overhead of storing the length
        in every node would be quite significant.
        """
        return sum(1 for _ in self)

    def __repr__(self):
        return "plist({0})".format(list(self))
    __str__ = __repr__

    def cons(self, elem):
        """
        Return a new list with elem inserted as new head.

        >>> plist([1, 2]).cons(3)
        plist([3, 1, 2])
        """
        return PList(elem, self)

    def mcons(self, iterable):
        """
        Return a new list with all elements of iterable repeatedly cons:ed to the current list.
        NB! The elements will be inserted in the reverse order of the iterable.
        Runs in O(len(iterable)).

        >>> plist([1, 2]).mcons([3, 4])
        plist([4, 3, 1, 2])
        """
        head = self
        for elem in iterable:
            head = head.cons(elem)

        return head

    def reverse(self):
        """
        Return a reversed version of list. Runs in O(n) where n is the length of the list.

        >>> plist([1, 2, 3]).reverse()
        plist([3, 2, 1])

        Also supports the standard reversed function.

        >>> reversed(plist([1, 2, 3]))
        plist([3, 2, 1])
        """
        result = plist()
        head = self
        while head:
            result = result.cons(head.first)
            head = head.rest

        return result
    __reversed__ = reverse

    def split(self, index):
        """
        Split the list at position specified by index. Returns a tuple containing the
        list up until index and the list after the index. Runs in O(index).

        >>> plist([1, 2, 3, 4]).split(2)
        (plist([1, 2]), plist([3, 4]))
        """
        # Copy the first index nodes into a builder; the remainder is shared.
        lb = _PListBuilder()
        right_list = self
        i = 0
        while right_list and i < index:
            lb.append_elem(right_list.first)
            right_list = right_list.rest
            i += 1

        if not right_list:
            # Just a small optimization in the cases where no split occurred
            return self, _EMPTY_PLIST

        return lb.build(), right_list

    def __iter__(self):
        li = self
        while li:
            yield li.first
            li = li.rest

    def __lt__(self, other):
        if not isinstance(other, _PListBase):
            return NotImplemented

        return tuple(self) < tuple(other)

    def __eq__(self, other):
        """
        Traverses the lists, checking equality of elements.

        This is an O(n) operation, but preserves the standard semantics of list equality.
        """
        if not isinstance(other, _PListBase):
            return NotImplemented

        self_head = self
        other_head = other
        while self_head and other_head:
            if not self_head.first == other_head.first:
                return False
            self_head = self_head.rest
            other_head = other_head.rest

        # Equal only if both lists were exhausted at the same time.
        return not self_head and not other_head

    def __getitem__(self, index):
        # Don't use this data structure if you plan to do a lot of indexing, it is
        # very inefficient! Use a PVector instead!

        if isinstance(index, slice):
            if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
                # Open-ended step-1 slice: just drop the prefix, tail is shared.
                return self._drop(index.start)

            # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
            return plist(tuple(self)[index])

        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

        if index < 0:
            # NB: O(n)!
            index += len(self)

        try:
            return self._drop(index).first
        except AttributeError as e:
            # Walking off the end reaches the empty list, whose .first raises
            # AttributeError; translate that to the expected IndexError.
            raise IndexError("PList index out of range") from e

    def _drop(self, count):
        if count < 0:
            raise IndexError("PList index out of range")

        head = self
        while count > 0:
            head = head.rest
            count -= 1

        return head

    def __hash__(self):
        return hash(tuple(self))

    def remove(self, elem):
        """
        Return new list with first element equal to elem removed. O(k) where k is the position
        of the element that is removed.

        Raises ValueError if no matching element is found.

        >>> plist([1, 2, 1]).remove(1)
        plist([2, 1])
        """

        # Copy nodes up to the match, then splice in the (shared) remainder.
        builder = _PListBuilder()
        head = self
        while head:
            if head.first == elem:
                return builder.append_plist(head.rest)

            builder.append_elem(head.first)
            head = head.rest

        raise ValueError('{0} not found in PList'.format(elem))
||||
|
||||
class PList(_PListBase):
    """
    Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
    Element access is O(k) where k is the position of the element in the list. Taking the
    length of the list is O(n).

    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.

    Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
    create an instance.

    Some examples:

    >>> x = plist([1, 2])
    >>> y = x.cons(3)
    >>> x
    plist([1, 2])
    >>> y
    plist([3, 1, 2])
    >>> y.first
    3
    >>> y.rest == x
    True
    >>> y[:2]
    plist([3, 1])
    """
    # first: the element stored in this node; rest: the next node (or the
    # empty-list singleton). NB: rest is mutable so _PListBuilder can link
    # nodes during construction — never mutated after the node is published.
    __slots__ = ('first', 'rest')

    def __new__(cls, first, rest):
        instance = super(PList, cls).__new__(cls)
        instance.first = first
        instance.rest = rest
        return instance

    def __bool__(self):
        # A node always represents a non-empty list.
        return True
    __nonzero__ = __bool__
|
||||
|
||||
# Register PList as a virtual Sequence/Hashable subclass for isinstance checks.
Sequence.register(PList)
Hashable.register(PList)
|
||||
|
||||
class _EmptyPList(_PListBase):
    """Singleton type representing the empty persistent list."""
    __slots__ = ()

    def __bool__(self):
        # The empty list is falsy, terminating iteration/traversal loops.
        return False
    __nonzero__ = __bool__

    @property
    def first(self):
        raise AttributeError("Empty PList has no first")

    @property
    def rest(self):
        # The tail of the empty list is the empty list itself.
        return self
|
||||
|
||||
Sequence.register(_EmptyPList)
Hashable.register(_EmptyPList)

# Shared singleton for the empty list; every plist terminates in this node.
_EMPTY_PLIST = _EmptyPList()
|
||||
|
||||
def plist(iterable=(), reverse=False):
    """
    Creates a new persistent list containing all elements of iterable.
    Optional parameter reverse specifies if the elements should be inserted in
    reverse order or not.

    >>> plist([1, 2, 3])
    plist([1, 2, 3])
    >>> plist([1, 2, 3], reverse=True)
    plist([3, 2, 1])
    """
    # Cons:ing builds the list back-to-front, so preserving the iterable's
    # order requires feeding the elements in reversed order.
    elements = iterable if reverse else reversed(list(iterable))
    result = _EMPTY_PLIST
    for elem in elements:
        result = result.cons(elem)
    return result
|
||||
|
||||
def l(*elements):
    """
    Creates a new persistent list containing all arguments.

    >>> l(1, 2, 3)
    plist([1, 2, 3])
    """
    # Thin convenience wrapper over the iterable-based factory.
    return plist(iterable=elements)
461
.venv/Lib/site-packages/pyrsistent/_pmap.py
Normal file
461
.venv/Lib/site-packages/pyrsistent/_pmap.py
Normal file
@ -0,0 +1,461 @@
|
||||
from collections.abc import Mapping, Hashable
|
||||
from itertools import chain
|
||||
from pyrsistent._pvector import pvector
|
||||
from pyrsistent._transformations import transform
|
||||
|
||||
|
||||
class PMap(object):
|
||||
"""
|
||||
Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.
|
||||
|
||||
Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
|
||||
create an instance.
|
||||
|
||||
Was originally written as a very close copy of the Clojure equivalent but was later rewritten to closer
|
||||
re-assemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
|
||||
hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
|
||||
the containing vectors size the map is reallocated to a vector of double the size. This is done to avoid
|
||||
excessive hash collisions.
|
||||
|
||||
This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
|
||||
semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
|
||||
for example assignments and deletion of values.
|
||||
|
||||
PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
|
||||
element access.
|
||||
|
||||
Random access and insert is log32(n) where n is the size of the map.
|
||||
|
||||
The following are examples of some common operations on persistent maps
|
||||
|
||||
>>> m1 = m(a=1, b=3)
|
||||
>>> m2 = m1.set('c', 3)
|
||||
>>> m3 = m2.remove('a')
|
||||
>>> m1 == {'a': 1, 'b': 3}
|
||||
True
|
||||
>>> m2 == {'a': 1, 'b': 3, 'c': 3}
|
||||
True
|
||||
>>> m3 == {'b': 3, 'c': 3}
|
||||
True
|
||||
>>> m3['c']
|
||||
3
|
||||
>>> m3.c
|
||||
3
|
||||
"""
|
||||
__slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')
|
||||
|
||||
def __new__(cls, size, buckets):
    # size: number of key/value pairs in the map.
    # buckets: sparse vector of buckets; each non-empty bucket holds
    # (key, value) pairs (see _getitem), empty slots are falsy.
    self = super(PMap, cls).__new__(cls)
    self._size = size
    self._buckets = buckets
    return self
|
||||
@staticmethod
def _get_bucket(buckets, key):
    """Return (index, bucket) for the slot that key hashes into."""
    index = hash(key) % len(buckets)
    bucket = buckets[index]
    return index, bucket
||||
@staticmethod
def _getitem(buckets, key):
    """Look up key in its bucket; raise KeyError when absent."""
    _, bucket = PMap._get_bucket(buckets, key)
    if bucket:
        # Linear scan — buckets are kept short by the resize policy.
        for k, v in bucket:
            if k == key:
                return v

    raise KeyError(key)
||||
def __getitem__(self, key):
    """Return the value mapped to key; raise KeyError when absent."""
    return PMap._getitem(self._buckets, key)
||||
@staticmethod
def _contains(buckets, key):
    """Return True when key is present in the bucket it hashes to."""
    _, bucket = PMap._get_bucket(buckets, key)
    if not bucket:
        # Empty slot: the key cannot be present.
        return False

    return any(k == key for k, _ in bucket)
||||
def __contains__(self, key):
    """Membership test backed by the bucket scan in _contains."""
    return self._contains(self._buckets, key)
|
||||
# Mapping supplies get() implemented on top of __getitem__.
get = Mapping.get
||||
def __iter__(self):
    """Iterate over the keys, mirroring dict behaviour."""
    return self.iterkeys()
||||
def __getattr__(self, key):
    """
    Dot-notation access to entries (m(a=1).a == 1).

    The KeyError is translated to AttributeError so missing attributes
    behave normally, e.g. for hasattr().
    """
    try:
        return self[key]
    except KeyError as e:
        raise AttributeError(
            "{0} has no attribute '{1}'".format(type(self).__name__, key)
        ) from e
||||
def iterkeys(self):
    """Yield every key, in bucket order."""
    for k, _ in self.iteritems():
        yield k
||||
# These are more efficient implementations compared to the original
# methods that are based on the keys iterator and then calls the
# accessor functions to access the value for the corresponding key
def itervalues(self):
    """Yield every value, in bucket order."""
    for _, v in self.iteritems():
        yield v
||||
def iteritems(self):
    """Yield all (key, value) pairs by walking every non-empty bucket."""
    for bucket in self._buckets:
        if bucket:
            for k, v in bucket:
                yield k, v
||||
def values(self):
    """Return all values as a persistent vector."""
    return pvector(self.itervalues())
||||
def keys(self):
    """Return all keys as a persistent vector."""
    return pvector(self.iterkeys())
||||
def items(self):
    """Return all (key, value) pairs as a persistent vector."""
    return pvector(self.iteritems())
||||
def __len__(self):
    # O(1): the size is cached rather than recomputed from the buckets.
    return self._size
||||
def __repr__(self):
    """Representation of the form pmap({...})."""
    return 'pmap({0})'.format(str(dict(self)))
||||
def __eq__(self, other):
    """Equality against any Mapping, with cheap fast paths for PMap/dict."""
    if self is other:
        return True
    if not isinstance(other, Mapping):
        return NotImplemented
    if len(self) != len(other):
        return False
    if isinstance(other, PMap):
        # Differing cached hashes prove inequality without comparing items.
        if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
                and self._cached_hash != other._cached_hash):
            return False
        # Identical bucket vectors imply identical contents.
        if self._buckets == other._buckets:
            return True
        return dict(self.iteritems()) == dict(other.iteritems())
    elif isinstance(other, dict):
        return dict(self.iteritems()) == other
    # Generic Mapping: materialize both sides as dicts and compare.
    return dict(self.iteritems()) == dict(other.items())
||||
# Derived from __eq__ via the Mapping ABC.
__ne__ = Mapping.__ne__
|
||||
def __lt__(self, other):
    # Maps have no natural total order; reject all order comparisons.
    raise TypeError('PMaps are not orderable')

__le__ = __lt__
__gt__ = __lt__
__ge__ = __lt__
||||
def __str__(self):
    """Same textual form as __repr__."""
    return self.__repr__()
||||
def __hash__(self):
    """Hash of the item set, computed lazily and cached (PMaps are immutable)."""
    if not hasattr(self, '_cached_hash'):
        self._cached_hash = hash(frozenset(self.iteritems()))
    return self._cached_hash
|
||||
def set(self, key, val):
    """
    Return a new PMap with key and val inserted.

    >>> m1 = m(a=1, b=2)
    >>> m2 = m1.set('a', 3)
    >>> m3 = m1.set('c' ,4)
    >>> m1 == {'a': 1, 'b': 2}
    True
    >>> m2 == {'a': 3, 'b': 2}
    True
    >>> m3 == {'a': 1, 'b': 2, 'c': 4}
    True
    """
    # Implemented via a throwaway evolver to share the insertion logic.
    return self.evolver().set(key, val).persistent()
|
||||
def remove(self, key):
    """
    Return a new PMap without the element specified by key. Raises KeyError if the element
    is not present.

    >>> m1 = m(a=1, b=2)
    >>> m1.remove('a')
    pmap({'b': 2})
    """
    return self.evolver().remove(key).persistent()
|
||||
def discard(self, key):
    """
    Return a new PMap without the element specified by key. Returns reference to itself
    if element is not present.

    >>> m1 = m(a=1, b=2)
    >>> m1.discard('a')
    pmap({'b': 2})
    >>> m1 is m1.discard('c')
    True
    """
    # EAFP variant of remove(): a missing key is simply a no-op.
    try:
        return self.remove(key)
    except KeyError:
        return self
|
||||
def update(self, *maps):
    """
    Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
    maps the rightmost (last) value is inserted.

    >>> m1 = m(a=1, b=2)
    >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35}) == {'a': 17, 'b': 2, 'c': 3, 'd': 35}
    True
    """
    # "Last one wins" expressed as an update_with merge function.
    return self.update_with(lambda l, r: r, *maps)
|
||||
def update_with(self, update_fn, *maps):
|
||||
"""
|
||||
Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
|
||||
maps the values will be merged using merge_fn going from left to right.
|
||||
|
||||
>>> from operator import add
|
||||
>>> m1 = m(a=1, b=2)
|
||||
>>> m1.update_with(add, m(a=2)) == {'a': 3, 'b': 2}
|
||||
True
|
||||
|
||||
The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.
|
||||
|
||||
>>> m1 = m(a=1)
|
||||
>>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
|
||||
pmap({'a': 1})
|
||||
"""
|
||||
evolver = self.evolver()
|
||||
for map in maps:
|
||||
for key, value in map.items():
|
||||
evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)
|
||||
|
||||
return evolver.persistent()
|
||||
|
||||
    def __add__(self, other):
        # m1 + m2 merges the maps; right-hand values win on key conflicts
        # (delegates to update()).
        return self.update(other)

    # PEP 584-style dict union operator, same semantics as +.
    __or__ = __add__

    def __reduce__(self):
        # Pickling support: rebuild via the pmap() factory from a plain dict.
        return pmap, (dict(self),)
|
||||
|
||||
    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.

        :param transformations: alternating match expressions and transformation functions.

        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'

        When nothing has been transformed the original data structure is kept

        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """
        # Delegates to the shared machinery in pyrsistent._transformations.
        return transform(self, transformations)
|
||||
|
||||
    def copy(self):
        # The map is immutable, so a "copy" is simply the map itself.
        return self
|
||||
|
||||
    class _Evolver(object):
        """Mutable view over a PMap used to batch many updates efficiently;
        call persistent() to freeze the result back into a PMap."""
        __slots__ = ('_buckets_evolver', '_size', '_original_pmap')

        def __init__(self, original_pmap):
            self._original_pmap = original_pmap
            # Buckets are themselves stored in a pvector; evolve that too.
            self._buckets_evolver = original_pmap._buckets.evolver()
            self._size = original_pmap._size

        def __getitem__(self, key):
            return PMap._getitem(self._buckets_evolver, key)

        def __setitem__(self, key, val):
            self.set(key, val)

        def set(self, key, val):
            # Grow the bucket vector when bucket count drops below ~0.67x the
            # element count (keeps the average bucket short).
            if len(self._buckets_evolver) < 0.67 * self._size:
                self._reallocate(2 * len(self._buckets_evolver))

            kv = (key, val)
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)
            if bucket:
                for k, v in bucket:
                    if k == key:
                        # Key already present: rewrite the bucket only when the
                        # value object actually differs (identity check keeps
                        # redundant sets cheap).
                        if v is not val:
                            new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
                            self._buckets_evolver[index] = new_bucket

                        return self

                # New key hashing into an occupied bucket: prepend the pair.
                new_bucket = [kv]
                new_bucket.extend(bucket)
                self._buckets_evolver[index] = new_bucket
                self._size += 1
            else:
                # Empty slot: create a fresh single-entry bucket.
                self._buckets_evolver[index] = [kv]
                self._size += 1

            return self

        def _reallocate(self, new_size):
            # Rehash every (key, value) pair into a larger bucket list.
            new_list = new_size * [None]
            buckets = self._buckets_evolver.persistent()
            for k, v in chain.from_iterable(x for x in buckets if x):
                index = hash(k) % new_size
                if new_list[index]:
                    new_list[index].append((k, v))
                else:
                    new_list[index] = [(k, v)]

            # A reallocation should always result in a dirty buckets evolver to avoid
            # possible loss of elements when doing the reallocation.
            self._buckets_evolver = pvector().evolver()
            self._buckets_evolver.extend(new_list)

        def is_dirty(self):
            return self._buckets_evolver.is_dirty()

        def persistent(self):
            # Only build a new PMap when something changed; otherwise return
            # the original so no-op evolutions preserve identity.
            if self.is_dirty():
                self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())

            return self._original_pmap

        def __len__(self):
            return self._size

        def __contains__(self, key):
            return PMap._contains(self._buckets_evolver, key)

        def __delitem__(self, key):
            self.remove(key)

        def remove(self, key):
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)

            if bucket:
                new_bucket = [(k, v) for (k, v) in bucket if k != key]
                if len(bucket) > len(new_bucket):
                    # Key was present: store the shrunken bucket, or None when
                    # the bucket becomes empty.
                    self._buckets_evolver[index] = new_bucket if new_bucket else None
                    self._size -= 1
                    return self

            raise KeyError('{0}'.format(key))
|
||||
|
||||
    def evolver(self):
        """
        Create a new evolver for this pmap. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> m1 = m(a=1, b=2)
        >>> e = m1.evolver()
        >>> e['c'] = 3
        >>> len(e)
        3
        >>> del e['a']

        The underlying pmap remains the same:

        >>> m1 == {'a': 1, 'b': 2}
        True

        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.

        >>> m2 = e.persistent()
        >>> m2 == {'b': 2, 'c': 3}
        True

        The new pmap will share data with the original pmap in the same way that would have
        been done if only using operations on the pmap.
        """
        return self._Evolver(self)


# Register PMap as a virtual subclass of the relevant ABCs so
# isinstance(pm, Mapping) etc. hold without inheritance.
Mapping.register(PMap)
Hashable.register(PMap)
|
||||
|
||||
|
||||
def _turbo_mapping(initial, pre_size):
    """Internal constructor: hash every item of ``initial`` into a freshly
    allocated bucket list and wrap the result in a PMap.

    ``pre_size`` overrides the derived bucket count when non-zero.
    """
    if pre_size:
        size = pre_size
    else:
        try:
            size = 2 * len(initial) or 8
        except Exception:
            # Guess we can't figure out the length. Give up on length hinting,
            # we can always reallocate later.
            size = 8

    buckets = size * [None]

    if not isinstance(initial, Mapping):
        # Normalize to a dict so the fill loop below can assume unique keys.
        initial = dict(initial)

    for key, value in initial.items():
        slot = hash(key) % size
        existing = buckets[slot]
        if existing is None:
            buckets[slot] = [(key, value)]
        else:
            existing.append((key, value))

    return PMap(len(initial), pvector().extend(buckets))
|
||||
|
||||
|
||||
# Canonical empty map, shared by every pmap() call that supplies no data.
_EMPTY_PMAP = _turbo_mapping({}, 0)
|
||||
|
||||
|
||||
def pmap(initial=(), pre_size=0):
    """
    Create new persistent map, inserts all elements in initial into the newly created map.
    The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
    may have a positive performance impact in the cases where you know beforehand that a large number of elements
    will be inserted into the map eventually since it will reduce the number of reallocations required.

    :param initial: a Mapping or iterable of (key, value) pairs to populate the map with.
    :param pre_size: optional initial bucket vector size; 0 derives it from ``initial``.

    >>> pmap({'a': 13, 'b': 14}) == {'a': 13, 'b': 14}
    True
    """
    # The default is an immutable empty tuple rather than {} to avoid the
    # shared-mutable-default-argument pitfall; behaviour is unchanged since
    # the default is only tested for truthiness and iterated.
    if not initial and pre_size == 0:
        return _EMPTY_PMAP

    return _turbo_mapping(initial, pre_size)
|
||||
|
||||
|
||||
def m(**kwargs):
    """
    Creates a new persistent map. Inserts all key value arguments into the newly created map.

    >>> m(a=13, b=14) == {'a': 13, 'b': 14}
    True
    """
    # Keyword names become the map's keys, so keys are limited to identifiers.
    return pmap(kwargs)
|
167
.venv/Lib/site-packages/pyrsistent/_precord.py
Normal file
167
.venv/Lib/site-packages/pyrsistent/_precord.py
Normal file
@ -0,0 +1,167 @@
|
||||
from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
|
||||
from pyrsistent._field_common import (
|
||||
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
|
||||
)
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
|
||||
|
||||
class _PRecordMeta(type):
    """Metaclass for PRecord: collects declared fields and invariants and
    caches derived lookup data on the class being created."""
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_precord_fields')
        store_invariants(dct, bases, '_precord_invariants', '__invariant__')

        # Names of fields that must be present on every instance.
        dct['_precord_mandatory_fields'] = {
            fname for fname, spec in dct['_precord_fields'].items() if spec.mandatory
        }

        # Initial values, recorded only for fields that declare one.
        dct['_precord_initial_values'] = {
            fname: spec.initial
            for fname, spec in dct['_precord_fields'].items()
            if spec.initial is not PFIELD_NO_INITIAL
        }

        # Records carry no per-instance __dict__.
        dct['__slots__'] = ()

        return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct)
|
||||
|
||||
|
||||
class PRecord(PMap, CheckedType, metaclass=_PRecordMeta):
    """
    A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
    from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
    access using subscript notation.

    More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs):
        # Hack total! If these two special attributes exist that means we can create
        # ourselves. Otherwise we need to go through the Evolver to create the structures
        # for us.
        if '_precord_size' in kwargs and '_precord_buckets' in kwargs:
            return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets'])

        # Internal-only kwargs used by create(); never stored as fields.
        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('_ignore_extra', False)

        initial_values = kwargs
        if cls._precord_initial_values:
            # Declared initial values form the baseline (callables are invoked
            # per instantiation); explicit kwargs override them.
            initial_values = dict((k, v() if callable(v) else v)
                                  for k, v in cls._precord_initial_values.items())
            initial_values.update(kwargs)

        # The evolver enforces factories, type checks and invariants per field.
        e = _PRecordEvolver(cls, pmap(pre_size=len(cls._precord_fields)), _factory_fields=factory_fields, _ignore_extra=ignore_extra)
        for k, v in initial_values.items():
            e[k] = v

        return e.persistent()

    def set(self, *args, **kwargs):
        """
        Set a field in the record. This set function differs slightly from that in the PMap
        class. First of all it accepts key-value pairs. Second it accepts multiple key-value
        pairs to perform one, atomic, update of multiple fields.
        """

        # The PRecord set() can accept kwargs since all fields that have been declared are
        # valid python identifiers. Also allow multiple fields to be set in one operation.
        if args:
            # Positional form: set(key, value) — PMap-compatible.
            return super(PRecord, self).set(args[0], args[1])

        return self.update(kwargs)

    def evolver(self):
        """
        Returns an evolver of this object.
        """
        return _PRecordEvolver(self.__class__, self)

    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items()))

    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PRecord of the current type and assign the values
        specified in kwargs.

        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
                             in the set of fields on the PRecord.
        """
        # Already the right type: return as-is (records are immutable).
        if isinstance(kwargs, cls):
            return kwargs

        if ignore_extra:
            # Drop keys that are not declared fields before constructing.
            kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs}

        return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs)

    def __reduce__(self):
        # Pickling support: rebuild through _restore_pickle so field factories
        # and invariants run on unpickle.
        return _restore_pickle, (self.__class__, dict(self),)

    def serialize(self, format=None):
        """
        Serialize the current PRecord using custom serializer functions for fields where
        such have been supplied.
        """
        return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())
|
||||
|
||||
|
||||
class _PRecordEvolver(PMap._Evolver):
    """Evolver that enforces PRecord field declarations (factories, type
    checks and invariants) on every assignment before delegating to the
    underlying PMap evolver. Accumulated failures are raised from
    persistent() as a single InvariantException."""
    __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra')

    def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False):
        super(_PRecordEvolver, self).__init__(original_pmap)
        self._destination_cls = cls
        self._invariant_error_codes = []
        self._missing_fields = []
        self._factory_fields = _factory_fields
        self._ignore_extra = _ignore_extra

    def __setitem__(self, key, original_value):
        self.set(key, original_value)

    def set(self, key, original_value):
        field = self._destination_cls._precord_fields.get(key)
        if field:
            if self._factory_fields is None or field in self._factory_fields:
                try:
                    # Run the declared factory; only pass ignore_extra when the
                    # factory is known to accept it.
                    if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra):
                        value = field.factory(original_value, ignore_extra=self._ignore_extra)
                    else:
                        value = field.factory(original_value)
                except InvariantException as e:
                    # Collect nested failures; re-raised later from persistent().
                    self._invariant_error_codes += e.invariant_errors
                    self._missing_fields += e.missing_fields
                    return self
            else:
                value = original_value

            check_type(self._destination_cls, field, key, value)

            is_ok, error_code = field.invariant(value)
            if not is_ok:
                self._invariant_error_codes.append(error_code)

            return super(_PRecordEvolver, self).set(key, value)
        else:
            raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__))

    def persistent(self):
        cls = self._destination_cls
        is_dirty = self.is_dirty()
        pm = super(_PRecordEvolver, self).persistent()
        if is_dirty or not isinstance(pm, cls):
            # Rebuild as the record subclass via the internal constructor hack
            # (see PRecord.__new__).
            result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size)
        else:
            result = pm

        if cls._precord_mandatory_fields:
            # Report any mandatory field that never received a value.
            self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f
                                          in (cls._precord_mandatory_fields - set(result.keys())))

        if self._invariant_error_codes or self._missing_fields:
            raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields),
                                     'Field invariant failed')

        check_global_invariants(result, cls._precord_invariants)

        return result
|
227
.venv/Lib/site-packages/pyrsistent/_pset.py
Normal file
227
.venv/Lib/site-packages/pyrsistent/_pset.py
Normal file
@ -0,0 +1,227 @@
|
||||
from collections.abc import Set, Hashable
|
||||
import sys
|
||||
from pyrsistent._pmap import pmap
|
||||
|
||||
|
||||
class PSet(object):
    """
    Persistent set implementation. Built on top of the persistent map. The set supports all operations
    in the Set protocol and is Hashable.

    Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`
    to create an instance.

    Random access and insert is log32(n) where n is the size of the set.

    Some examples:

    >>> s = pset([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(2)
    >>> s
    pset([1, 2, 3])
    >>> s2
    pset([1, 2, 3, 4])
    >>> s3
    pset([1, 3, 4])
    """
    # The set is represented as a PMap whose keys are the elements (values
    # are a dummy True). __weakref__ keeps weak references possible despite
    # __slots__.
    __slots__ = ('_map', '__weakref__')

    def __new__(cls, m):
        self = super(PSet, cls).__new__(cls)
        self._map = m
        return self

    def __contains__(self, element):
        return element in self._map

    def __iter__(self):
        return iter(self._map)

    def __len__(self):
        return len(self._map)

    def __repr__(self):
        if not self:
            # Empty case: str(set()) renders as 'set()', giving 'pset()'.
            return 'p' + str(set(self))

        return 'pset([{0}])'.format(str(set(self))[1:-1])

    def __str__(self):
        return self.__repr__()

    def __hash__(self):
        # Delegate to the underlying map's hash.
        return hash(self._map)

    def __reduce__(self):
        # Pickling support
        return pset, (list(self),)

    @classmethod
    def _from_iterable(cls, it, pre_size=8):
        # Elements become map keys; True is a dummy value.
        return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size))

    def add(self, element):
        """
        Return a new PSet with element added

        >>> s1 = s(1, 2)
        >>> s1.add(3)
        pset([1, 2, 3])
        """
        return self.evolver().add(element).persistent()

    def update(self, iterable):
        """
        Return a new PSet with elements in iterable added

        >>> s1 = s(1, 2)
        >>> s1.update([3, 4, 4])
        pset([1, 2, 3, 4])
        """
        e = self.evolver()
        for element in iterable:
            e.add(element)

        return e.persistent()

    def remove(self, element):
        """
        Return a new PSet with element removed. Raises KeyError if element is not present.

        >>> s1 = s(1, 2)
        >>> s1.remove(2)
        pset([1])
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        raise KeyError("Element '%s' not present in PSet" % repr(element))

    def discard(self, element):
        """
        Return a new PSet with element removed. Returns itself if element is not present.
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        return self

    class _Evolver(object):
        """Mutable view over a PSet; thin wrapper around the PMap evolver."""
        __slots__ = ('_original_pset', '_pmap_evolver')

        def __init__(self, original_pset):
            self._original_pset = original_pset
            self._pmap_evolver = original_pset._map.evolver()

        def add(self, element):
            self._pmap_evolver[element] = True
            return self

        def remove(self, element):
            del self._pmap_evolver[element]
            return self

        def is_dirty(self):
            return self._pmap_evolver.is_dirty()

        def persistent(self):
            # Preserve identity when nothing changed.
            if not self.is_dirty():
                return self._original_pset

            return PSet(self._pmap_evolver.persistent())

        def __len__(self):
            return len(self._pmap_evolver)

    def copy(self):
        # The set is immutable, so a "copy" is just the set itself.
        return self

    def evolver(self):
        """
        Create a new evolver for this pset. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> s1 = s(1, 2, 3)
        >>> e = s1.evolver()
        >>> _ = e.add(4)
        >>> len(e)
        4
        >>> _ = e.remove(1)

        The underlying pset remains the same:

        >>> s1
        pset([1, 2, 3])

        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.

        >>> s2 = e.persistent()
        >>> s2
        pset([2, 3, 4])

        The new pset will share data with the original pset in the same way that would have
        been done if only using operations on the pset.
        """
        return PSet._Evolver(self)

    # All the operations and comparisons you would expect on a set.
    #
    # This is not very beautiful. If we avoid inheriting from PSet we can use the
    # __slots__ concepts (which requires a new style class) and hopefully save some memory.
    __le__ = Set.__le__
    __lt__ = Set.__lt__
    __gt__ = Set.__gt__
    __ge__ = Set.__ge__
    __eq__ = Set.__eq__
    __ne__ = Set.__ne__

    __and__ = Set.__and__
    __or__ = Set.__or__
    __sub__ = Set.__sub__
    __xor__ = Set.__xor__

    # Named aliases matching the builtin set API.
    issubset = __le__
    issuperset = __ge__
    union = __or__
    intersection = __and__
    difference = __sub__
    symmetric_difference = __xor__

    isdisjoint = Set.isdisjoint
|
||||
|
||||
# Register PSet as a virtual subclass of the relevant ABCs.
Set.register(PSet)
Hashable.register(PSet)

# Canonical empty set, shared by every pset() call that supplies no data.
_EMPTY_PSET = PSet(pmap())
|
||||
|
||||
|
||||
def pset(iterable=(), pre_size=8):
    """
    Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that
    used for :py:func:`pmap`.

    >>> s1 = pset([1, 2, 3, 2])
    >>> s1
    pset([1, 2, 3])
    """
    if iterable:
        return PSet._from_iterable(iterable, pre_size=pre_size)

    # No data supplied: hand back the shared empty singleton.
    return _EMPTY_PSET
|
||||
|
||||
|
||||
def s(*elements):
    """
    Create a persistent set.

    Takes an arbitrary number of arguments to insert into the new set.

    >>> s1 = s(1, 2, 3, 2)
    >>> s1
    pset([1, 2, 3])
    """
    # Duplicates collapse since elements become keys of the underlying map.
    return pset(elements)
|
711
.venv/Lib/site-packages/pyrsistent/_pvector.py
Normal file
711
.venv/Lib/site-packages/pyrsistent/_pvector.py
Normal file
@ -0,0 +1,711 @@
|
||||
from abc import abstractmethod, ABCMeta
|
||||
from collections.abc import Sequence, Hashable
|
||||
from numbers import Integral
|
||||
import operator
|
||||
from pyrsistent._transformations import transform
|
||||
|
||||
|
||||
def _bitcount(val):
|
||||
return bin(val).count("1")
|
||||
|
||||
# Number of children per trie node; 32 gives log32(n) depth.
BRANCH_FACTOR = 32
# Mask extracting the within-node index from a position (the low bits).
BIT_MASK = BRANCH_FACTOR - 1
# Bits consumed per trie level (5 for a branch factor of 32).
SHIFT = _bitcount(BIT_MASK)
|
||||
|
||||
|
||||
def compare_pvector(v, other, operator):
    # NOTE(review): the 'operator' parameter shadows the imported operator
    # module inside this function; left as-is since renaming would change the
    # keyword interface. Comparison is delegated to plain lists; another
    # PVector is realized to a list first.
    return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other)
|
||||
|
||||
|
||||
def _index_or_slice(index, stop):
|
||||
if stop is None:
|
||||
return index
|
||||
|
||||
return slice(index, stop)
|
||||
|
||||
|
||||
class PythonPVector(object):
    """
    Support structure for PVector that implements structural sharing for vectors using a trie.
    """
    __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')

    def __new__(cls, count, shift, root, tail):
        self = super(PythonPVector, cls).__new__(cls)
        self._count = count    # total number of elements
        self._shift = shift    # bit shift used at the root trie level
        self._root = root      # trie (nested lists) holding all but the tail
        self._tail = tail      # flat list holding the trailing elements

        # Derived attribute stored for performance
        self._tail_offset = self._count - len(self._tail)
        return self

    def __len__(self):
        return self._count
|
||||
|
||||
    def __getitem__(self, index):
        if isinstance(index, slice):
            # There are more conditions than the below where it would be OK to
            # return ourselves, implement those...
            if index.start is None and index.stop is None and index.step is None:
                return self

            # This is a bit nasty realizing the whole structure as a list before
            # slicing it but it is the fastest way I've found to date, and it's easy :-)
            return _EMPTY_PVECTOR.extend(self.tolist()[index])

        if index < 0:
            # Negative indices count from the end, as for lists.
            index += self._count

        # _node_for raises IndexError for out-of-range indices.
        return PythonPVector._node_for(self, index)[index & BIT_MASK]

    def __add__(self, other):
        # v1 + v2 concatenates (delegates to extend()).
        return self.extend(other)

    def __repr__(self):
        return 'pvector({0})'.format(str(self.tolist()))

    def __str__(self):
        return self.__repr__()

    def __iter__(self):
        # This is kind of lazy and will produce some memory overhead but it is the fasted method
        # by far of those tried since it uses the speed of the built in python list directly.
        return iter(self.tolist())
|
||||
|
||||
    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        # Identity fast-path, then cheap length check, then element-wise compare.
        return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)

    def __gt__(self, other):
        return compare_pvector(self, other, operator.gt)

    def __lt__(self, other):
        return compare_pvector(self, other, operator.lt)

    def __ge__(self, other):
        return compare_pvector(self, other, operator.ge)

    def __le__(self, other):
        return compare_pvector(self, other, operator.le)

    def __mul__(self, times):
        # Repetition, mirroring list * int semantics.
        if times <= 0 or self is _EMPTY_PVECTOR:
            return _EMPTY_PVECTOR

        if times == 1:
            return self

        return _EMPTY_PVECTOR.extend(times * self.tolist())

    __rmul__ = __mul__
|
||||
|
||||
    def _fill_list(self, node, shift, the_list):
        # Depth-first walk of the trie, appending leaf contents to the_list.
        if shift:
            shift -= SHIFT
            for n in node:
                self._fill_list(n, shift, the_list)
        else:
            the_list.extend(node)

    def tolist(self):
        """
        The fastest way to convert the vector into a python list.
        """
        the_list = []
        self._fill_list(self._root, self._shift, the_list)
        the_list.extend(self._tail)
        return the_list

    def _totuple(self):
        """
        Returns the content as a python tuple.
        """
        return tuple(self.tolist())

    def __hash__(self):
        # Taking the easy way out again...
        return hash(self._totuple())
|
||||
|
||||
    def transform(self, *transformations):
        # Delegates to the shared machinery in pyrsistent._transformations.
        return transform(self, transformations)

    def __reduce__(self):
        # Pickling support
        return pvector, (self.tolist(),)

    def mset(self, *args):
        """Multi-set: ``mset(i1, v1, i2, v2, ...)`` applies all updates in a
        single evolver pass and returns the resulting vector."""
        if len(args) % 2:
            raise TypeError("mset expected an even number of arguments")

        evolver = self.evolver()
        for i in range(0, len(args), 2):
            evolver[args[i]] = args[i+1]

        return evolver.persistent()
|
||||
|
||||
    class Evolver(object):
        """Mutable view over a PythonPVector. In-range updates path-copy trie
        nodes (tracked in _dirty_nodes so each node is copied at most once);
        appends accumulate in _extra_tail. persistent() freezes the result."""
        __slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
                     '_extra_tail', '_cached_leafs', '_orig_pvector')

        def __init__(self, v):
            self._reset(v)

        def __getitem__(self, index):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

            if index < 0:
                index += self._count + len(self._extra_tail)

            if self._count <= index < self._count + len(self._extra_tail):
                # Index falls in the appended-but-not-yet-frozen region.
                return self._extra_tail[index - self._count]

            return PythonPVector._node_for(self, index)[index & BIT_MASK]

        def _reset(self, v):
            # Adopt the state of vector v and clear all pending mutations.
            self._count = v._count
            self._shift = v._shift
            self._root = v._root
            self._tail = v._tail
            self._tail_offset = v._tail_offset
            self._dirty_nodes = {}
            self._cached_leafs = {}
            self._extra_tail = []
            self._orig_pvector = v

        def append(self, element):
            self._extra_tail.append(element)
            return self

        def extend(self, iterable):
            self._extra_tail.extend(iterable)
            return self

        def set(self, index, val):
            self[index] = val
            return self

        def __setitem__(self, index, val):
            if not isinstance(index, Integral):
                raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

            if index < 0:
                index += self._count + len(self._extra_tail)

            if 0 <= index < self._count:
                # Fast path: the leaf for this index was already copied.
                node = self._cached_leafs.get(index >> SHIFT)
                if node:
                    node[index & BIT_MASK] = val
                elif index >= self._tail_offset:
                    # Copy the tail once, then mutate it in place.
                    if id(self._tail) not in self._dirty_nodes:
                        self._tail = list(self._tail)
                        self._dirty_nodes[id(self._tail)] = True
                        self._cached_leafs[index >> SHIFT] = self._tail
                    self._tail[index & BIT_MASK] = val
                else:
                    # Path-copy down the trie to the leaf.
                    self._root = self._do_set(self._shift, self._root, index, val)
            elif self._count <= index < self._count + len(self._extra_tail):
                self._extra_tail[index - self._count] = val
            elif index == self._count + len(self._extra_tail):
                # Setting one-past-the-end appends.
                self._extra_tail.append(val)
            else:
                raise IndexError("Index out of range: %s" % (index,))

        def _do_set(self, level, node, i, val):
            # Copy each node on the path at most once per evolver lifetime.
            if id(node) in self._dirty_nodes:
                ret = node
            else:
                ret = list(node)
                self._dirty_nodes[id(ret)] = True

            if level == 0:
                ret[i & BIT_MASK] = val
                self._cached_leafs[i >> SHIFT] = ret
            else:
                sub_index = (i >> level) & BIT_MASK  # >>>
                ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)

            return ret

        def delete(self, index):
            del self[index]
            return self

        def __delitem__(self, key):
            if self._orig_pvector:
                # All structural sharing bets are off, base evolver on _extra_tail only
                l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
                l.extend(self._extra_tail)
                self._reset(_EMPTY_PVECTOR)
                self._extra_tail = l

            del self._extra_tail[key]

        def persistent(self):
            result = self._orig_pvector
            if self.is_dirty():
                result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
                # Rebase on the frozen result so the evolver can be reused.
                self._reset(result)

            return result

        def __len__(self):
            return self._count + len(self._extra_tail)

        def is_dirty(self):
            return bool(self._dirty_nodes or self._extra_tail)
|
||||
|
||||
    def evolver(self):
        # Hand out a mutable Evolver seeded with this vector's state.
        return PythonPVector.Evolver(self)

    def set(self, i, val):
        """Return a new vector with position ``i`` replaced by ``val``;
        ``i == len(self)`` appends."""
        # This method could be implemented by a call to mset() but doing so would cause
        # a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
        # of PVector) so we're keeping this implementation for now.

        if not isinstance(i, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)

        if i < 0:
            i += self._count

        if 0 <= i < self._count:
            if i >= self._tail_offset:
                # Target lives in the tail: copy just the tail list.
                new_tail = list(self._tail)
                new_tail[i & BIT_MASK] = val
                return PythonPVector(self._count, self._shift, self._root, new_tail)

            # Target lives in the trie: path-copy down to the leaf.
            return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)

        if i == self._count:
            # Setting one-past-the-end appends.
            return self.append(val)

        raise IndexError("Index out of range: %s" % (i,))
|
||||
|
||||
    def _do_set(self, level, node, i, val):
        # Copy-on-write update: clone every node on the path to index i.
        ret = list(node)
        if level == 0:
            ret[i & BIT_MASK] = val
        else:
            sub_index = (i >> level) & BIT_MASK  # >>>
            ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)

        return ret

    @staticmethod
    def _node_for(pvector_like, i):
        # Locate the leaf list holding index i; works for both vectors and
        # evolvers since both expose _count/_tail_offset/_root/_shift.
        if 0 <= i < pvector_like._count:
            if i >= pvector_like._tail_offset:
                return pvector_like._tail

            node = pvector_like._root
            for level in range(pvector_like._shift, 0, -SHIFT):
                node = node[(i >> level) & BIT_MASK]  # >>>

            return node

        raise IndexError("Index out of range: %s" % (i,))
|
||||
|
||||
def _create_new_root(self):
|
||||
new_shift = self._shift
|
||||
|
||||
# Overflow root?
|
||||
if (self._count >> SHIFT) > (1 << self._shift): # >>>
|
||||
new_root = [self._root, self._new_path(self._shift, self._tail)]
|
||||
new_shift += SHIFT
|
||||
else:
|
||||
new_root = self._push_tail(self._shift, self._root, self._tail)
|
||||
|
||||
return new_root, new_shift
|
||||
|
||||
def append(self, val):
    """Return a new vector with val appended; the original is unchanged."""
    if len(self._tail) < BRANCH_FACTOR:
        # Room left in the tail: copy only the tail, share the trie.
        new_tail = list(self._tail)
        new_tail.append(val)
        return PythonPVector(self._count + 1, self._shift, self._root, new_tail)

    # Full tail, push into tree
    new_root, new_shift = self._create_new_root()
    return PythonPVector(self._count + 1, new_shift, new_root, [val])
|
||||
|
||||
def _new_path(self, level, node):
    """Wrap node in single-child interior nodes until it sits at the
    requested trie level (level is a multiple of SHIFT)."""
    wrapped = node
    remaining = level
    while remaining > 0:
        wrapped = [wrapped]
        remaining -= SHIFT
    return wrapped
|
||||
|
||||
def _mutating_insert_tail(self):
    # In-place variant used only while a brand-new vector is being
    # built: push the full tail into the trie and start an empty tail.
    self._root, self._shift = self._create_new_root()
    self._tail = []
|
||||
|
||||
def _mutating_fill_tail(self, offset, sequence):
    # Copy as many elements as still fit into the tail from
    # sequence[offset:], updating the element count in place.
    # Returns the new offset into sequence.
    max_delta_len = BRANCH_FACTOR - len(self._tail)
    delta = sequence[offset:offset + max_delta_len]
    self._tail.extend(delta)
    delta_len = len(delta)
    self._count += delta_len
    return offset + delta_len
|
||||
|
||||
def _mutating_extend(self, sequence):
    # In-place bulk append used by extend(); safe only because it is
    # called on a freshly created vector no caller has observed yet.
    offset = 0
    sequence_len = len(sequence)
    while offset < sequence_len:
        # Fill the tail, and whenever it becomes full push it into
        # the trie before continuing.
        offset = self._mutating_fill_tail(offset, sequence)
        if len(self._tail) == BRANCH_FACTOR:
            self._mutating_insert_tail()

    self._tail_offset = self._count - len(self._tail)
|
||||
|
||||
def extend(self, obj):
    """Return a new vector with all elements of obj appended."""
    # Mutates the new vector directly for efficiency but that's only an
    # implementation detail, once it is returned it should be considered immutable
    l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
    if l:
        # Seed a brand-new vector with the first element, then fill in
        # the rest in place before anyone else can observe it.
        new_vector = self.append(l[0])
        new_vector._mutating_extend(l[1:])
        return new_vector

    return self
|
||||
|
||||
def _push_tail(self, level, parent, tail_node):
    """
    if parent is leaf, insert node,
    else does it map to an existing child? ->
         node_to_insert = push node one more level
    else alloc new path

    return  node_to_insert placed in copy of parent
    """
    ret = list(parent)

    if level == SHIFT:
        # parent is one level above the leaves: append the tail
        # directly as a new leaf.
        ret.append(tail_node)
        return ret

    # Index of the child that must cover the last element.
    sub_index = ((self._count - 1) >> level) & BIT_MASK  # >>>
    if len(parent) > sub_index:
        # An existing subtree covers this range: recurse into a copy.
        ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
        return ret

    # No subtree yet: allocate a fresh single path down to the tail.
    ret.append(self._new_path(level - SHIFT, tail_node))
    return ret
|
||||
|
||||
def index(self, value, *args, **kwargs):
    """Return the first index of value, delegating to list.index so the
    optional start/stop arguments behave exactly like the builtin."""
    as_list = self.tolist()
    return as_list.index(value, *args, **kwargs)
|
||||
|
||||
def count(self, value):
    """Return the number of occurrences of value in the vector."""
    # Delegate to list.count to keep the builtin identity-or-equality
    # matching semantics (e.g. for NaN).
    return self.tolist().count(value)
|
||||
|
||||
def delete(self, index, stop=None):
    """Return a new vector with index (or slice index:stop) removed."""
    # Simple implementation: materialize, delete, rebuild.
    l = self.tolist()
    del l[_index_or_slice(index, stop)]
    return _EMPTY_PVECTOR.extend(l)
|
||||
|
||||
def remove(self, value):
    """Return a new vector with the first occurrence of value removed.

    Raises ValueError (from list.remove) if value is not present.
    """
    l = self.tolist()
    l.remove(value)
    return _EMPTY_PVECTOR.extend(l)
|
||||
|
||||
class PVector(metaclass=ABCMeta):
|
||||
"""
|
||||
Persistent vector implementation. Meant as a replacement for the cases where you would normally
|
||||
use a Python list.
|
||||
|
||||
Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
|
||||
create an instance.
|
||||
|
||||
Heavily influenced by the persistent vector available in Clojure. Initially this was more or
|
||||
less just a port of the Java code for the Clojure vector. It has since been modified and to
|
||||
some extent optimized for usage in Python.
|
||||
|
||||
The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
|
||||
updates are done to the original vector. Structural sharing between vectors are applied where possible to save
|
||||
space and to avoid making complete copies.
|
||||
|
||||
This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
|
||||
semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
|
||||
for example assignments.
|
||||
|
||||
The PVector implements the Sequence protocol and is Hashable.
|
||||
|
||||
Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.
|
||||
|
||||
The following are examples of some common operations on persistent vectors:
|
||||
|
||||
>>> p = v(1, 2, 3)
|
||||
>>> p2 = p.append(4)
|
||||
>>> p3 = p2.extend([5, 6, 7])
|
||||
>>> p
|
||||
pvector([1, 2, 3])
|
||||
>>> p2
|
||||
pvector([1, 2, 3, 4])
|
||||
>>> p3
|
||||
pvector([1, 2, 3, 4, 5, 6, 7])
|
||||
>>> p3[5]
|
||||
6
|
||||
>>> p.set(1, 99)
|
||||
pvector([1, 99, 3])
|
||||
>>>
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __len__(self):
|
||||
"""
|
||||
>>> len(v(1, 2, 3))
|
||||
3
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Get value at index. Full slicing support.
|
||||
|
||||
>>> v1 = v(5, 6, 7, 8)
|
||||
>>> v1[2]
|
||||
7
|
||||
>>> v1[1:3]
|
||||
pvector([6, 7])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __add__(self, other):
|
||||
"""
|
||||
>>> v1 = v(1, 2)
|
||||
>>> v2 = v(3, 4)
|
||||
>>> v1 + v2
|
||||
pvector([1, 2, 3, 4])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __mul__(self, times):
|
||||
"""
|
||||
>>> v1 = v(1, 2)
|
||||
>>> 3 * v1
|
||||
pvector([1, 2, 1, 2, 1, 2])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __hash__(self):
|
||||
"""
|
||||
>>> v1 = v(1, 2, 3)
|
||||
>>> v2 = v(1, 2, 3)
|
||||
>>> hash(v1) == hash(v2)
|
||||
True
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def evolver(self):
|
||||
"""
|
||||
Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
|
||||
with "transaction like" semantics. No part of the underlying vector i updated, it is still
|
||||
fully immutable. Furthermore multiple evolvers created from the same pvector do not
|
||||
interfere with each other.
|
||||
|
||||
You may want to use an evolver instead of working directly with the pvector in the
|
||||
following cases:
|
||||
|
||||
* Multiple updates are done to the same vector and the intermediate results are of no
|
||||
interest. In this case using an evolver may be a more efficient and easier to work with.
|
||||
* You need to pass a vector into a legacy function or a function that you have no control
|
||||
over which performs in place mutations of lists. In this case pass an evolver instance
|
||||
instead and then create a new pvector from the evolver once the function returns.
|
||||
|
||||
The following example illustrates a typical workflow when working with evolvers. It also
|
||||
displays most of the API (which i kept small by design, you should not be tempted to
|
||||
use evolvers in excess ;-)).
|
||||
|
||||
Create the evolver and perform various mutating updates to it:
|
||||
|
||||
>>> v1 = v(1, 2, 3, 4, 5)
|
||||
>>> e = v1.evolver()
|
||||
>>> e[1] = 22
|
||||
>>> _ = e.append(6)
|
||||
>>> _ = e.extend([7, 8, 9])
|
||||
>>> e[8] += 1
|
||||
>>> len(e)
|
||||
9
|
||||
|
||||
The underlying pvector remains the same:
|
||||
|
||||
>>> v1
|
||||
pvector([1, 2, 3, 4, 5])
|
||||
|
||||
The changes are kept in the evolver. An updated pvector can be created using the
|
||||
persistent() function on the evolver.
|
||||
|
||||
>>> v2 = e.persistent()
|
||||
>>> v2
|
||||
pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
|
||||
|
||||
The new pvector will share data with the original pvector in the same way that would have
|
||||
been done if only using operations on the pvector.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def mset(self, *args):
|
||||
"""
|
||||
Return a new vector with elements in specified positions replaced by values (multi set).
|
||||
|
||||
Elements on even positions in the argument list are interpreted as indexes while
|
||||
elements on odd positions are considered values.
|
||||
|
||||
>>> v1 = v(1, 2, 3)
|
||||
>>> v1.mset(0, 11, 2, 33)
|
||||
pvector([11, 2, 33])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def set(self, i, val):
|
||||
"""
|
||||
Return a new vector with element at position i replaced with val. The original vector remains unchanged.
|
||||
|
||||
Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
|
||||
result in an IndexError.
|
||||
|
||||
>>> v1 = v(1, 2, 3)
|
||||
>>> v1.set(1, 4)
|
||||
pvector([1, 4, 3])
|
||||
>>> v1.set(3, 4)
|
||||
pvector([1, 2, 3, 4])
|
||||
>>> v1.set(-1, 4)
|
||||
pvector([1, 2, 4])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def append(self, val):
|
||||
"""
|
||||
Return a new vector with val appended.
|
||||
|
||||
>>> v1 = v(1, 2)
|
||||
>>> v1.append(3)
|
||||
pvector([1, 2, 3])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def extend(self, obj):
|
||||
"""
|
||||
Return a new vector with all values in obj appended to it. Obj may be another
|
||||
PVector or any other Iterable.
|
||||
|
||||
>>> v1 = v(1, 2, 3)
|
||||
>>> v1.extend([4, 5])
|
||||
pvector([1, 2, 3, 4, 5])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def index(self, value, *args, **kwargs):
|
||||
"""
|
||||
Return first index of value. Additional indexes may be supplied to limit the search to a
|
||||
sub range of the vector.
|
||||
|
||||
>>> v1 = v(1, 2, 3, 4, 3)
|
||||
>>> v1.index(3)
|
||||
2
|
||||
>>> v1.index(3, 3, 5)
|
||||
4
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def count(self, value):
|
||||
"""
|
||||
Return the number of times that value appears in the vector.
|
||||
|
||||
>>> v1 = v(1, 4, 3, 4)
|
||||
>>> v1.count(4)
|
||||
2
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def transform(self, *transformations):
|
||||
"""
|
||||
Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
|
||||
consists of two parts. One match expression that specifies which elements to transform
|
||||
and one transformation function that performs the actual transformation.
|
||||
|
||||
>>> from pyrsistent import freeze, ny
|
||||
>>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
|
||||
... {'author': 'Steve', 'content': 'A slightly longer article'}],
|
||||
... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
|
||||
>>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
|
||||
>>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
|
||||
>>> very_short_news.articles[0].content
|
||||
'A short article'
|
||||
>>> very_short_news.articles[1].content
|
||||
'A slightly long...'
|
||||
|
||||
When nothing has been transformed the original data structure is kept
|
||||
|
||||
>>> short_news is news_paper
|
||||
True
|
||||
>>> very_short_news is news_paper
|
||||
False
|
||||
>>> very_short_news.articles[0] is news_paper.articles[0]
|
||||
True
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def delete(self, index, stop=None):
|
||||
"""
|
||||
Delete a portion of the vector by index or range.
|
||||
|
||||
>>> v1 = v(1, 2, 3, 4, 5)
|
||||
>>> v1.delete(1)
|
||||
pvector([1, 3, 4, 5])
|
||||
>>> v1.delete(1, 3)
|
||||
pvector([1, 4, 5])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def remove(self, value):
|
||||
"""
|
||||
Remove the first occurrence of a value from the vector.
|
||||
|
||||
>>> v1 = v(1, 2, 3, 2, 1)
|
||||
>>> v2 = v1.remove(1)
|
||||
>>> v2
|
||||
pvector([2, 3, 2, 1])
|
||||
>>> v2.remove(1)
|
||||
pvector([2, 3, 2])
|
||||
"""
|
||||
|
||||
|
||||
# The canonical empty vector; all pure-Python vectors are built by
# extending it, which maximizes structural sharing.
_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
# Register the concrete implementation and the ABC with the relevant
# abstract base classes so isinstance() checks work as expected.
PVector.register(PythonPVector)
Sequence.register(PVector)
Hashable.register(PVector)
|
||||
|
||||
def python_pvector(iterable=()):
    """
    Create a new persistent vector containing the elements in iterable.

    >>> v1 = pvector([1, 2, 3])
    >>> v1
    pvector([1, 2, 3])
    """
    # Extending the shared empty vector reuses its trie structure.
    return _EMPTY_PVECTOR.extend(iterable)
|
||||
|
||||
try:
    # Use the C extension as underlying trie implementation if it is available
    import os
    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
        # Explicit opt-out of the C extension via environment variable.
        pvector = python_pvector
    else:
        from pvectorc import pvector
        # Make the C implementation pass isinstance(x, PVector) checks.
        PVector.register(type(pvector()))
except ImportError:
    # C extension not built or not installed; fall back to pure Python.
    pvector = python_pvector
|
||||
|
||||
|
||||
def v(*elements):
    """
    Create a new persistent vector containing all parameters to this function.

    >>> v1 = v(1, 2, 3)
    >>> v1
    pvector([1, 2, 3])
    """
    return pvector(elements)
|
83
.venv/Lib/site-packages/pyrsistent/_toolz.py
Normal file
83
.venv/Lib/site-packages/pyrsistent/_toolz.py
Normal file
@ -0,0 +1,83 @@
|
||||
"""
|
||||
Functionality copied from the toolz package to avoid having
|
||||
to add toolz as a dependency.
|
||||
|
||||
See https://github.com/pytoolz/toolz/.
|
||||
|
||||
toolz is relased under BSD licence. Below is the licence text
|
||||
from toolz as it appeared when copying the code.
|
||||
|
||||
--------------------------------------------------------------
|
||||
|
||||
Copyright (c) 2013 Matthew Rocklin
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
a. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
b. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
c. Neither the name of toolz nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
|
||||
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGE.
|
||||
"""
|
||||
import operator
|
||||
from functools import reduce
|
||||
|
||||
|
||||
def get_in(keys, coll, default=None, no_default=False):
    """Return ``coll[k0][k1]...[kN]`` for ``keys == [k0, ..., kN]``.

    NB: copied behavior from toolz's get_in
    (https://github.com/pytoolz/toolz/); works with persistent data
    structures as well as the corresponding stdlib structures.

    If any lookup fails with KeyError, IndexError or TypeError the
    value of ``default`` is returned instead, unless ``no_default`` is
    true, in which case the original exception propagates.

    An empty ``keys`` returns ``coll`` itself.
    """
    current = coll
    try:
        for key in keys:
            current = current[key]
    except (KeyError, IndexError, TypeError):
        if no_default:
            raise
        return default
    return current
|
139
.venv/Lib/site-packages/pyrsistent/_transformations.py
Normal file
139
.venv/Lib/site-packages/pyrsistent/_transformations.py
Normal file
@ -0,0 +1,139 @@
|
||||
import re
|
||||
try:
|
||||
from inspect import Parameter, signature
|
||||
except ImportError:
|
||||
signature = None
|
||||
from inspect import getfullargspec
|
||||
|
||||
|
||||
_EMPTY_SENTINEL = object()
|
||||
|
||||
|
||||
def inc(x):
    """Return the current value increased by one."""
    incremented = x + 1
    return incremented
|
||||
|
||||
|
||||
def dec(x):
    """Return the current value decreased by one."""
    decremented = x - 1
    return decremented
|
||||
|
||||
|
||||
def discard(evolver, key):
    """Remove *key* from *evolver* if present; a missing key is a no-op."""
    try:
        evolver.__delitem__(key)
    except KeyError:
        # Absent keys are silently ignored by design.
        pass
|
||||
|
||||
|
||||
# Matchers
|
||||
def rex(expr):
    """Regular expression matcher to use together with transform functions."""
    compiled = re.compile(expr)

    def _matcher(key):
        # Non-string keys never match; otherwise return the match
        # object (truthy) or None, same as re.match.
        return isinstance(key, str) and compiled.match(key)

    return _matcher
|
||||
|
||||
|
||||
def ny(_):
    """ Matcher that matches any value """
    # Wildcard used in transform paths: accepts every key.
    return True
|
||||
|
||||
|
||||
# Support functions
|
||||
def _chunks(l, n):
    # Yield successive n-sized slices of l; the final chunk may be
    # shorter than n.
    for i in range(0, len(l), n):
        yield l[i:i + n]
|
||||
|
||||
|
||||
def transform(structure, transformations):
    # Apply (path, command) pairs in order; each pair rewrites the
    # parts of the structure selected by its path.
    r = structure
    for path, command in _chunks(transformations, 2):
        r = _do_to_path(r, path, command)
    return r
|
||||
|
||||
|
||||
def _do_to_path(structure, path, command):
    # End of path: apply the command if callable, otherwise substitute
    # it as the new value.
    if not path:
        return command(structure) if callable(command) else command

    # Resolve the head of the path to (key, value) pairs and recurse
    # into each selected value with the remaining path.
    kvs = _get_keys_and_values(structure, path[0])
    return _update_structure(structure, kvs, path[1:], command)
|
||||
|
||||
|
||||
def _items(structure):
    # Produce (key, value) pairs: mapping items() when available,
    # otherwise (index, element) pairs for sequence-like structures.
    try:
        return structure.items()
    except AttributeError:
        # Support wider range of structures by adding a transform_items() or similar?
        return list(enumerate(structure))
|
||||
|
||||
|
||||
def _get(structure, key, default):
    # Look up key in structure, preferring item access and falling
    # back to attribute access; a missing entry yields default.
    try:
        if hasattr(structure, '__getitem__'):
            return structure[key]

        return getattr(structure, key)

    except (IndexError, KeyError):
        return default
|
||||
|
||||
|
||||
def _get_keys_and_values(structure, key_spec):
    # Resolve one path element into the list of (key, value) pairs it
    # selects within structure.
    if callable(key_spec):
        # Support predicates as callable objects in the path
        arity = _get_arity(key_spec)
        if arity == 1:
            # Unary predicates are called with the "key" of the path
            # - eg a key in a mapping, an index in a sequence.
            return [(k, v) for k, v in _items(structure) if key_spec(k)]
        elif arity == 2:
            # Binary predicates are called with the key and the corresponding
            # value.
            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
        else:
            # Other arities are an error.
            raise ValueError(
                "callable in transform path must take 1 or 2 arguments"
            )

    # Non-callables are used as-is as a key.
    return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]
|
||||
|
||||
|
||||
if signature is None:
    def _get_arity(f):
        # Legacy fallback: positional parameter count minus those that
        # carry defaults.
        argspec = getfullargspec(f)
        return len(argspec.args) - len(argspec.defaults or ())
else:
    def _get_arity(f):
        # Count parameters that can be supplied positionally and have
        # no default value.
        return sum(
            1
            for p
            in signature(f).parameters.values()
            if p.default is Parameter.empty
            and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
        )
|
||||
|
||||
|
||||
def _update_structure(structure, kvs, path, command):
    # Apply command along the remaining path to every selected
    # (key, value) pair, collecting the changes through an evolver.
    from pyrsistent._pmap import pmap
    e = structure.evolver()
    if not path and command is discard:
        # Do this in reverse to avoid index problems with vectors. See #92.
        for k, v in reversed(kvs):
            discard(e, k)
    else:
        for k, v in kvs:
            is_empty = False
            if v is _EMPTY_SENTINEL:
                # Allow expansion of structure but make sure to cover the case
                # when an empty pmap is added as leaf node. See #154.
                is_empty = True
                v = pmap()

            result = _do_to_path(v, path, command)
            if result is not v or is_empty:
                e[k] = result

    return e.persistent()
|
0
.venv/Lib/site-packages/pyrsistent/py.typed
Normal file
0
.venv/Lib/site-packages/pyrsistent/py.typed
Normal file
80
.venv/Lib/site-packages/pyrsistent/typing.py
Normal file
80
.venv/Lib/site-packages/pyrsistent/typing.py
Normal file
@ -0,0 +1,80 @@
|
||||
"""Helpers for use with type annotation.
|
||||
|
||||
Use the empty classes in this module when annotating the types of Pyrsistent
|
||||
objects, instead of using the actual collection class.
|
||||
|
||||
For example,
|
||||
|
||||
from pyrsistent import pvector
|
||||
from pyrsistent.typing import PVector
|
||||
|
||||
myvector: PVector[str] = pvector(['a', 'b', 'c'])
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
try:
|
||||
from typing import Container
|
||||
from typing import Hashable
|
||||
from typing import Generic
|
||||
from typing import Iterable
|
||||
from typing import Mapping
|
||||
from typing import Sequence
|
||||
from typing import Sized
|
||||
from typing import TypeVar
|
||||
|
||||
__all__ = [
|
||||
'CheckedPMap',
|
||||
'CheckedPSet',
|
||||
'CheckedPVector',
|
||||
'PBag',
|
||||
'PDeque',
|
||||
'PList',
|
||||
'PMap',
|
||||
'PSet',
|
||||
'PVector',
|
||||
]
|
||||
|
||||
T = TypeVar('T')
|
||||
KT = TypeVar('KT')
|
||||
VT = TypeVar('VT')
|
||||
|
||||
class CheckedPMap(Mapping[KT, VT], Hashable):
|
||||
pass
|
||||
|
||||
# PSet.add and PSet.discard have different type signatures than that of Set.
|
||||
class CheckedPSet(Generic[T], Hashable):
|
||||
pass
|
||||
|
||||
class CheckedPVector(Sequence[T], Hashable):
|
||||
pass
|
||||
|
||||
class PBag(Container[T], Iterable[T], Sized, Hashable):
|
||||
pass
|
||||
|
||||
class PDeque(Sequence[T], Hashable):
|
||||
pass
|
||||
|
||||
class PList(Sequence[T], Hashable):
|
||||
pass
|
||||
|
||||
class PMap(Mapping[KT, VT], Hashable):
|
||||
pass
|
||||
|
||||
# PSet.add and PSet.discard have different type signatures than that of Set.
|
||||
class PSet(Generic[T], Hashable):
|
||||
pass
|
||||
|
||||
class PVector(Sequence[T], Hashable):
|
||||
pass
|
||||
|
||||
class PVectorEvolver(Generic[T]):
|
||||
pass
|
||||
|
||||
class PMapEvolver(Generic[KT, VT]):
|
||||
pass
|
||||
|
||||
class PSetEvolver(Generic[T]):
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
292
.venv/Lib/site-packages/pyrsistent/typing.pyi
Normal file
292
.venv/Lib/site-packages/pyrsistent/typing.pyi
Normal file
@ -0,0 +1,292 @@
|
||||
# flake8: noqa: E704
|
||||
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
|
||||
# Stubs for pyrsistent (Python 3.6)
|
||||
#
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Generic
|
||||
from typing import Hashable
|
||||
from typing import Iterator
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import Optional
|
||||
from typing import Sequence
|
||||
from typing import AbstractSet
|
||||
from typing import Sized
|
||||
from typing import Set
|
||||
from typing import Tuple
|
||||
from typing import TypeVar
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
from typing import overload
|
||||
|
||||
T = TypeVar('T')
|
||||
KT = TypeVar('KT')
|
||||
VT = TypeVar('VT')
|
||||
|
||||
|
||||
class PMap(Mapping[KT, VT], Hashable):
|
||||
def __add__(self, other: PMap[KT, VT]) -> PMap[KT, VT]: ...
|
||||
def __getitem__(self, key: KT) -> VT: ...
|
||||
def __getattr__(self, key: str) -> VT: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __iter__(self) -> Iterator[KT]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def copy(self) -> PMap[KT, VT]: ...
|
||||
def discard(self, key: KT) -> PMap[KT, VT]: ...
|
||||
def evolver(self) -> PMapEvolver[KT, VT]: ...
|
||||
def iteritems(self) -> Iterable[Tuple[KT, VT]]: ...
|
||||
def iterkeys(self) -> Iterable[KT]: ...
|
||||
def itervalues(self) -> Iterable[VT]: ...
|
||||
def remove(self, key: KT) -> PMap[KT, VT]: ...
|
||||
def set(self, key: KT, val: VT) -> PMap[KT, VT]: ...
|
||||
def transform(self, *transformations: Any) -> PMap[KT, VT]: ...
|
||||
def update(self, *args: Mapping): ...
|
||||
def update_with(self, update_fn: Callable[[VT, VT], VT], *args: Mapping) -> Any: ...
|
||||
|
||||
|
||||
class PMapEvolver(Generic[KT, VT]):
|
||||
def __delitem__(self, key: KT) -> None: ...
|
||||
def __getitem__(self, key: KT) -> VT: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __setitem__(self, key: KT, val: VT) -> None: ...
|
||||
def is_dirty(self) -> bool: ...
|
||||
def persistent(self) -> PMap[KT, VT]: ...
|
||||
def remove(self, key: KT) -> PMapEvolver[KT, VT]: ...
|
||||
def set(self, key: KT, val: VT) -> PMapEvolver[KT, VT]: ...
|
||||
|
||||
|
||||
class PVector(Sequence[T], Hashable):
|
||||
def __add__(self, other: PVector[T]) -> PVector[T]: ...
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> PVector[T]: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __mul__(self, other: PVector[T]) -> PVector[T]: ...
|
||||
def append(self, val: T) -> PVector[T]: ...
|
||||
def delete(self, index: int, stop: Optional[int]) -> PVector[T]: ...
|
||||
def evolver(self) -> PVectorEvolver[T]: ...
|
||||
def extend(self, obj: Iterable[T]) -> PVector[T]: ...
|
||||
def tolist(self) -> List[T]: ...
|
||||
def mset(self, *args: Iterable[Union[T, int]]) -> PVector[T]: ...
|
||||
def remove(self, value: T) -> PVector[T]: ...
|
||||
# Not compatible with MutableSequence
|
||||
def set(self, i: int, val: T) -> PVector[T]: ...
|
||||
def transform(self, *transformations: Any) -> PVector[T]: ...
|
||||
|
||||
|
||||
class PVectorEvolver(Sequence[T], Sized):
|
||||
def __delitem__(self, i: Union[int, slice]) -> None: ...
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> T: ...
|
||||
# Not actually supported
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> PVectorEvolver[T]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __setitem__(self, index: int, val: T) -> None: ...
|
||||
def append(self, val: T) -> PVectorEvolver[T]: ...
|
||||
def delete(self, value: T) -> PVectorEvolver[T]: ...
|
||||
def extend(self, obj: Iterable[T]) -> PVectorEvolver[T]: ...
|
||||
def is_dirty(self) -> bool: ...
|
||||
def persistent(self) -> PVector[T]: ...
|
||||
def set(self, i: int, val: T) -> PVectorEvolver[T]: ...
|
||||
|
||||
|
||||
class PSet(AbstractSet[T], Hashable):
|
||||
def __contains__(self, element: object) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __iter__(self) -> Iterator[T]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def add(self, element: T) -> PSet[T]: ...
|
||||
def copy(self) -> PSet[T]: ...
|
||||
def difference(self, iterable: Iterable) -> PSet[T]: ...
|
||||
def discard(self, element: T) -> PSet[T]: ...
|
||||
def evolver(self) -> PSetEvolver[T]: ...
|
||||
def intersection(self, iterable: Iterable) -> PSet[T]: ...
|
||||
def issubset(self, iterable: Iterable) -> bool: ...
|
||||
def issuperset(self, iterable: Iterable) -> bool: ...
|
||||
def remove(self, element: T) -> PSet[T]: ...
|
||||
def symmetric_difference(self, iterable: Iterable[T]) -> PSet[T]: ...
|
||||
def union(self, iterable: Iterable[T]) -> PSet[T]: ...
|
||||
def update(self, iterable: Iterable[T]) -> PSet[T]: ...
|
||||
|
||||
|
||||
class PSetEvolver(Generic[T], Sized):
|
||||
def __len__(self) -> int: ...
|
||||
def add(self, element: T) -> PSetEvolver[T]: ...
|
||||
def is_dirty(self) -> bool: ...
|
||||
def persistent(self) -> PSet[T]: ...
|
||||
def remove(self, element: T) -> PSetEvolver[T]: ...
|
||||
|
||||
|
||||
class PBag(Generic[T], Sized, Hashable):
|
||||
def __add__(self, other: PBag[T]) -> PBag[T]: ...
|
||||
def __and__(self, other: PBag[T]) -> PBag[T]: ...
|
||||
def __contains__(self, elem: object) -> bool: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __iter__(self) -> Iterator[T]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __or__(self, other: PBag[T]) -> PBag[T]: ...
|
||||
def __sub__(self, other: PBag[T]) -> PBag[T]: ...
|
||||
def add(self, elem: T) -> PBag[T]: ...
|
||||
def count(self, elem: T) -> int: ...
|
||||
def remove(self, elem: T) -> PBag[T]: ...
|
||||
def update(self, iterable: Iterable[T]) -> PBag[T]: ...
|
||||
|
||||
|
||||
class PDeque(Sequence[T], Hashable):
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> PDeque[T]: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __lt__(self, other: PDeque[T]) -> bool: ...
|
||||
def append(self, elem: T) -> PDeque[T]: ...
|
||||
def appendleft(self, elem: T) -> PDeque[T]: ...
|
||||
def extend(self, iterable: Iterable[T]) -> PDeque[T]: ...
|
||||
def extendleft(self, iterable: Iterable[T]) -> PDeque[T]: ...
|
||||
@property
|
||||
def left(self) -> T: ...
|
||||
# The real return type is Integral according to what pyrsistent
|
||||
# checks at runtime but mypy doesn't deal in numeric.*:
|
||||
# https://github.com/python/mypy/issues/2636
|
||||
@property
|
||||
def maxlen(self) -> int: ...
|
||||
def pop(self, count: int = 1) -> PDeque[T]: ...
|
||||
def popleft(self, count: int = 1) -> PDeque[T]: ...
|
||||
def remove(self, elem: T) -> PDeque[T]: ...
|
||||
def reverse(self) -> PDeque[T]: ...
|
||||
@property
|
||||
def right(self) -> T: ...
|
||||
def rotate(self, steps: int) -> PDeque[T]: ...
|
||||
|
||||
|
||||
class PList(Sequence[T], Hashable):
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> PList[T]: ...
|
||||
def __hash__(self) -> int: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __lt__(self, other: PList[T]) -> bool: ...
|
||||
def __gt__(self, other: PList[T]) -> bool: ...
|
||||
def cons(self, elem: T) -> PList[T]: ...
|
||||
@property
|
||||
def first(self) -> T: ...
|
||||
def mcons(self, iterable: Iterable[T]) -> PList[T]: ...
|
||||
def remove(self, elem: T) -> PList[T]: ...
|
||||
@property
|
||||
def rest(self) -> PList[T]: ...
|
||||
def reverse(self) -> PList[T]: ...
|
||||
def split(self, index: int) -> Tuple[PList[T], PList[T]]: ...
|
||||
|
||||
T_PClass = TypeVar('T_PClass', bound='PClass')
|
||||
|
||||
class PClass(Hashable):
|
||||
def __new__(cls, **kwargs: Any): ...
|
||||
def set(self: T_PClass, *args: Any, **kwargs: Any) -> T_PClass: ...
|
||||
@classmethod
|
||||
def create(
|
||||
cls: Type[T_PClass],
|
||||
kwargs: Any,
|
||||
_factory_fields: Optional[Any] = ...,
|
||||
ignore_extra: bool = ...,
|
||||
) -> T_PClass: ...
|
||||
def serialize(self, format: Optional[Any] = ...): ...
|
||||
def transform(self, *transformations: Any): ...
|
||||
def __eq__(self, other: object): ...
|
||||
def __ne__(self, other: object): ...
|
||||
def __hash__(self): ...
|
||||
def __reduce__(self): ...
|
||||
def evolver(self) -> PClassEvolver: ...
|
||||
def remove(self: T_PClass, name: Any) -> T_PClass: ...
|
||||
|
||||
class PClassEvolver:
    """Type stub for the mutable evolver companion of ``PClass``.

    Changes are accumulated through item assignment/deletion,
    ``set``/``remove``, or attribute access (``__getattr__``);
    ``persistent()`` freezes the accumulated state back into a ``PClass``.
    """

    def __init__(self, original: Any, initial_dict: Any) -> None: ...
    def __getitem__(self, item: Any): ...
    def set(self, key: Any, value: Any): ...
    def __setitem__(self, key: Any, value: Any) -> None: ...
    def remove(self, item: Any): ...
    def __delitem__(self, item: Any) -> None: ...
    def persistent(self) -> PClass: ...
    def __getattr__(self, item: Any): ...
|
||||
|
||||
|
||||
|
||||
class CheckedPMap(PMap[KT, VT]):
    """Type stub for a ``PMap`` whose keys and values are checked against
    ``__key_type__`` and ``__value_type__`` (pyrsistent._checked_types).
    """

    __key_type__: Type[KT]
    __value_type__: Type[VT]
    # Fix: return the parameterized type so the key/value type parameters
    # are not discarded when constructing directly via __new__.
    def __new__(cls, source: Mapping[KT, VT] = ..., size: int = ...) -> CheckedPMap[KT, VT]: ...
    @classmethod
    def create(cls, source_data: Mapping[KT, VT], _factory_fields: Any = ...) -> CheckedPMap[KT, VT]: ...
    def serialize(self, format: Optional[Any] = ...) -> Dict[KT, VT]: ...
|
||||
|
||||
|
||||
class CheckedPVector(PVector[T]):
    """Type stub for a ``PVector`` whose elements are checked against
    ``__type__`` (pyrsistent._checked_types).
    """

    __type__: Type[T]
    # Fix: __new__ receives the class, so the first parameter is ``cls``
    # (the original stub wrote ``self``); also return the parameterized
    # type so the element type parameter is preserved.
    def __new__(cls, initial: Iterable[T] = ...) -> CheckedPVector[T]: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPVector[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> List[T]: ...
|
||||
|
||||
|
||||
class CheckedPSet(PSet[T]):
    """Type stub for a ``PSet`` whose elements are checked against
    ``__type__`` (pyrsistent._checked_types).
    """

    __type__: Type[T]
    # Fix: return the parameterized type so the element type parameter is
    # not discarded when constructing directly via __new__.
    def __new__(cls, initial: Iterable[T] = ...) -> CheckedPSet[T]: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPSet[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> Set[T]: ...
|
||||
|
||||
|
||||
class InvariantException(Exception):
    """Type stub for the exception raised on invariant violations.

    ``invariant_errors`` collects the failing error codes (possibly as a
    nested tuple); ``missing_fields`` names absent mandatory fields.
    Runtime implementation: pyrsistent._checked_types.
    """

    invariant_errors: Tuple[Any, ...] = ...  # possibly nested tuple
    missing_fields: Tuple[str, ...] = ...
    def __init__(
        self,
        error_codes: Any = ...,
        missing_fields: Any = ...,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
|
||||
|
||||
|
||||
class CheckedTypeError(TypeError):
    """Type stub for the base error raised when a checked collection
    receives a value of the wrong type.

    Carries the collection class, the accepted types, and the offending
    type/value.  Runtime implementation: pyrsistent._checked_types.
    """

    source_class: Type[Any]
    expected_types: Tuple[Any, ...]
    actual_type: Type[Any]
    actual_value: Any
    def __init__(
        self,
        source_class: Any,
        expected_types: Any,
        actual_type: Any,
        actual_value: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
|
||||
|
||||
|
||||
# Key-flavoured specialisation of CheckedTypeError (pyrsistent._checked_types).
class CheckedKeyTypeError(CheckedTypeError): ...
|
||||
# Value-flavoured specialisation of CheckedTypeError (pyrsistent._checked_types).
class CheckedValueTypeError(CheckedTypeError): ...
|
||||
# Marker base class for checked types; stubbed with no members here.
class CheckedType: ...
|
||||
|
||||
|
||||
class PTypeError(TypeError):
    """Type stub for the error raised when a record/class field receives a
    value of the wrong type.

    Carries the owning class, the field name, the accepted types, and the
    offending type.  Runtime implementation: pyrsistent._field_common.
    """

    source_class: Type[Any] = ...
    field: str = ...
    expected_types: Tuple[Any, ...] = ...
    actual_type: Type[Any] = ...
    def __init__(
        self,
        source_class: Any,
        field: Any,
        expected_types: Any,
        actual_type: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
|
Reference in New Issue
Block a user