diff --git a/setup.py b/setup.py index 65a255a..e649628 100644 --- a/setup.py +++ b/setup.py @@ -139,6 +139,7 @@ def _read_file(filename): }, python_requires='>=3.7', install_requires=[ + 'zope.deferredimport', 'zope.interface', "cffi ; platform_python_implementation == 'CPython'", ], diff --git a/src/persistent/_compat.py b/src/persistent/_compat.py index 69b03dc..90c12ef 100644 --- a/src/persistent/_compat.py +++ b/src/persistent/_compat.py @@ -61,7 +61,7 @@ def _c_optimizations_available(): 'persistent.picklecache': cPickleCache, 'persistent.timestamp': _timestamp, } - except catch: # pragma: no cover (only Jython doesn't build extensions) + except catch: # pragma: no cover (only Jython doesn't build extensions) return {} @@ -93,7 +93,7 @@ def _should_attempt_c_optimizations(): """ if _c_optimizations_required(): return True - if PYPY: # pragma: no cover + if PYPY: # pragma: no cover return False return not _c_optimizations_ignored() @@ -157,7 +157,8 @@ def find_impl(): return py_impl c_opts = _c_optimizations_available() - if not c_opts: # pragma: no cover (only Jython doesn't build extensions) + # only Jython doesn't build extensions: + if not c_opts: # pragma: no cover return py_impl __traceback_info__ = c_opts @@ -189,7 +190,7 @@ def find_impl(): v = types.FunctionType( v.__code__, new_globals, - k, # name + k, # name v.__defaults__, v.__closure__, ) diff --git a/src/persistent/_ring_build.py b/src/persistent/_ring_build.py index e921c6a..70ece49 100644 --- a/src/persistent/_ring_build.py +++ b/src/persistent/_ring_build.py @@ -53,7 +53,7 @@ #define cffi_ring_add(ring, elt) ring_add((CPersistentRing*)ring, (CPersistentRing*)elt) #define cffi_ring_del(elt) ring_del((CPersistentRing*)elt) #define cffi_ring_move_to_head(ring, elt) ring_move_to_head((CPersistentRing*)ring, (CPersistentRing*)elt) -""" +""" # noqa: E501 line too long ffi.set_source('persistent._ring', source, diff --git a/src/persistent/dict.py b/src/persistent/dict.py index 77ed37b..3375313 100644 --- a/src/persistent/dict.py +++ b/src/persistent/dict.py @@ -11,6 +11,13 @@ # FOR A PARTICULAR PURPOSE. # ############################################################################## +from zope.deferredimport import deprecated -# persistent.dict is deprecated. Use persistent.mapping -from persistent.mapping import PersistentMapping as PersistentDict + +deprecated( + "`persistent.dict.PersistentDict` is deprecated. Use" + " `persistent.mapping.PersistentMapping` instead." + " This backward compatibility shim will be removed in persistent" + " version 6.", + PersistentDict='persistent.mapping:PersistentMapping', +) diff --git a/src/persistent/interfaces.py b/src/persistent/interfaces.py index eee60bb..6af222b 100644 --- a/src/persistent/interfaces.py +++ b/src/persistent/interfaces.py @@ -32,6 +32,7 @@ OID_TYPE = SERIAL_TYPE = bytes + class IPersistent(Interface): """Python persistent interface @@ -356,6 +357,7 @@ def _p_delattr(name): # TODO: document conflict resolution. + class IPersistentDataManager(Interface): """Provide services for managing persistent state. @@ -399,13 +401,13 @@ def register(object): """ # Maybe later: -## def mtime(object): -## """Return the modification time of the object. +# def mtime(object): +# """Return the modification time of the object. -## The modification time may not be known, in which case None -## is returned. If non-None, the return value is the kind of -## timestamp supplied by Python's time.time(). -## """ +# The modification time may not be known, in which case None +# is returned. 
If non-None, the return value is the kind of +# timestamp supplied by Python's time.time(). +# """ class IPickleCache(Interface): @@ -510,7 +512,6 @@ def new_ghost(oid, obj): If 'oid' is already in the cache, raise. """ - def invalidate(to_invalidate): """ Invalidate the indicated objects. @@ -537,10 +538,10 @@ def update_object_size_estimation(oid, new_size): cache_size = Attribute('Target size of the cache') cache_drain_resistance = Attribute('Factor for draining cache below ' - 'target size') + 'target size') cache_non_ghost_count = Attribute('Number of non-ghosts in the cache ' - '(XXX how is it different from ' - 'ringlen?') + '(XXX how is it different from ' + 'ringlen?') cache_data = Attribute("Property: copy of our 'data' dict") cache_klass_count = Attribute("Property: len of 'persistent_classes'") diff --git a/src/persistent/list.py b/src/persistent/list.py index 678725c..40e44e1 100644 --- a/src/persistent/list.py +++ b/src/persistent/list.py @@ -30,9 +30,9 @@ class PersistentList(UserList, persistent.Persistent): as changed and automatically persisted. .. versionchanged:: 4.5.2 - Using the `clear` method, or deleting a slice (e.g., ``del inst[:]`` or ``del inst[x:x]``) - now only results in marking the instance as changed if it actually removed - items. + Using the `clear` method, or deleting a slice (e.g., ``del inst[:]`` or + ``del inst[x:x]``) now only results in marking the instance as changed + if it actually removed items. """ __super_getitem = UserList.__getitem__ __super_setitem = UserList.__setitem__ @@ -67,7 +67,8 @@ def __getitem__(self, item): if sys.version_info[:3] < (3, 7, 4): # pragma: no cover # Likewise for __copy__. - # See https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 + # See + # https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) diff --git a/src/persistent/mapping.py b/src/persistent/mapping.py index 195131a..221358f 100644 --- a/src/persistent/mapping.py +++ b/src/persistent/mapping.py @@ -54,13 +54,13 @@ class PersistentMapping(IterableUserDict, persistent.Persistent): __super_pop = IterableUserDict.pop __super_popitem = IterableUserDict.popitem - - # Be sure to make a deep copy of our ``data`` (See PersistentList.) - # See https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 + # Be sure to make a deep copy of our ``data`` (See PersistentList.) See + # https://github.com/python/cpython/commit/3645d29a1dc2102fdb0f5f0c0129ff2295bcd768 # This was fixed in CPython 3.7.4, but we can't rely on that because it - # doesn't handle our old ``_container`` appropriately (it goes directly - # to ``self.__dict__``, bypassing the descriptor). The code here was initially + # doesn't handle our old ``_container`` appropriately (it goes directly to + # ``self.__dict__``, bypassing the descriptor). The code here was initially # based on the version found in 3.7.4. 
+ def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) @@ -141,7 +141,7 @@ def popitem(self): # ``data`` when we have ``_container`` instead @default - def data(self): # pylint:disable=method-hidden + def data(self): # pylint:disable=method-hidden # We don't want to cause a write on read, so we're careful not to # do anything that would cause us to become marked as changed, however, # if we're modified, then the saved record will have data, not diff --git a/src/persistent/persistence.py b/src/persistent/persistence.py index 9498176..3d72574 100644 --- a/src/persistent/persistence.py +++ b/src/persistent/persistence.py @@ -78,7 +78,6 @@ _OID_UNPACK = _OID_STRUCT.unpack - @use_c_impl @implementer(interfaces.IPersistent) class Persistent: @@ -134,7 +133,7 @@ def _set_oid(self, value): return # The C implementation allows *any* value to be # used as the _p_oid. - #if value is not None: + # if value is not None: # if not isinstance(value, OID_TYPE): # raise ValueError('Invalid OID type: %s' % value) # The C implementation only forbids changing the OID @@ -179,7 +178,7 @@ def _get_changed(self): if _OGA(self, '_Persistent__jar') is None: return False flags = _OGA(self, '_Persistent__flags') - if flags is None: # ghost + if flags is None: # ghost return None return bool(flags & _CHANGED) @@ -189,7 +188,7 @@ def _set_changed(self, value): self._p_activate() self._p_set_changed_flag(value) else: - if value is None: # -> ghost + if value is None: # -> ghost self._p_deactivate() else: self._p_set_changed_flag(value) @@ -212,8 +211,9 @@ def _get_mtime(self): # _p_state def _get_state(self): - # Note the use of OGA and caching to avoid recursive calls to __getattribute__: - # __getattribute__ calls _p_accessed calls cache.mru() calls _p_state + # Note the use of OGA and caching to avoid recursive calls to + # __getattribute__: __getattribute__ calls _p_accessed calls + # cache.mru() calls _p_state if _OGA(self, '_Persistent__jar') is None: return interfaces.UPTODATE flags = _OGA(self, '_Persistent__flags') @@ -254,6 +254,7 @@ def _get_sticky(self): if flags is None: return False return bool(flags & _STICKY) + def _set_sticky(self, value): flags = _OGA(self, '_Persistent__flags') if flags is None: @@ -287,7 +288,7 @@ def __getattribute__(self, name): """ oga = _OGA if (not name.startswith('_p_') and - name not in _SPECIAL_NAMES): + name not in _SPECIAL_NAMES): if oga(self, '_Persistent__flags') is None: oga(self, '_p_activate')() oga(self, '_p_accessed')() @@ -306,7 +307,7 @@ def __setattr__(self, name, value): if (_OGA(self, '_Persistent__jar') is not None and _OGA(self, '_Persistent__oid') is not None and not special_name and - not volatile): + not volatile): before = _OGA(self, '_Persistent__flags') after = before | _CHANGED if before != after: @@ -325,17 +326,17 @@ def __delattr__(self, name): if before != after: _OSA(self, '_Persistent__flags', after) if (_OGA(self, '_Persistent__jar') is not None and - _OGA(self, '_Persistent__oid') is not None): + _OGA(self, '_Persistent__oid') is not None): _OGA(self, '_p_register')() _ODA(self, name) def _slotnames(self, _v_exclude=True): slotnames = copyreg._slotnames(type(self)) return [x for x in slotnames - if not x.startswith('_p_') and - not (x.startswith('_v_') and _v_exclude) and - not x.startswith('_Persistent__') and - x not in _SLOTS] + if not x.startswith('_p_') and + not (x.startswith('_v_') and _v_exclude) and + not x.startswith('_Persistent__') and + x not in _SLOTS] def __getstate__(self): """ See 
IPersistent. @@ -343,12 +344,12 @@ def __getstate__(self): idict = getattr(self, '__dict__', None) slotnames = self._slotnames() if idict is not None: - # TODO: Convert to a dictionary comprehension, avoid the intermediate - # list. + # TODO: Convert to a dictionary comprehension, avoid the + # intermediate list. # pylint:disable=consider-using-dict-comprehension d = dict([x for x in idict.items() - if not x[0].startswith('_p_') and - not x[0].startswith('_v_')]) + if not x[0].startswith('_p_') and + not x[0].startswith('_v_')]) else: d = None if slotnames: @@ -375,7 +376,7 @@ def __setstate__(self, state): for k, v in inst_dict.items(): # Normally the keys for instance attributes are interned. # Do that here, but only if it is possible to do so. - idict[intern(k) if type(k) is str else k] = v + idict[intern(k) if k.__class__ is str else k] = v slotnames = self._slotnames() if slotnames: for k, v in slots.items(): @@ -393,7 +394,7 @@ def _p_activate(self): """ oga = _OGA before = oga(self, '_Persistent__flags') - if before is None: # Only do this if we're a ghost + if before is None: # Only do this if we're a ghost # Begin by marking up-to-date in case we bail early _OSA(self, '_Persistent__flags', 0) jar = oga(self, '_Persistent__jar') @@ -413,14 +414,14 @@ def _p_activate(self): _OSA(self, '_Persistent__flags', interfaces.CHANGED) try: jar.setstate(self) - except: + except BaseException: _OSA(self, '_Persistent__flags', before) raise else: # If we succeed, no matter what the implementation # of setstate did, mark ourself as up-to-date. The # C implementation unconditionally does this. - _OSA(self, '_Persistent__flags', 0) # up-to-date + _OSA(self, '_Persistent__flags', 0) # up-to-date # In the C implementation, _p_invalidate winds up calling # _p_deactivate. There are ZODB tests that depend on this; @@ -465,8 +466,8 @@ def _p_invalidate_deactivate_helper(self, clear=True): try: getattr(type_, slotname).__delete__(self) except AttributeError: - # AttributeError means slot variable was not initialized at all - - # - we can simply skip its deletion. + # AttributeError means slot variable was not + # initialized at all - we can simply skip its deletion. pass # Implementation detail: deactivating/invalidating @@ -479,7 +480,8 @@ def _p_invalidate_deactivate_helper(self, clear=True): except AttributeError: pass else: - cache.update_object_size_estimation(_OGA(self, '_Persistent__oid'), -1) + cache.update_object_size_estimation( + _OGA(self, '_Persistent__oid'), -1) # See notes in PickleCache.sweep for why we have to do this cache._persistent_deactivate_ran = True @@ -506,7 +508,8 @@ def _p_delattr(self, name): """ See IPersistent. """ if name.startswith('_p_'): - if name == '_p_oid' and self._p_is_in_cache(_OGA(self, '_Persistent__jar')): + if name == '_p_oid' and self._p_is_in_cache( + _OGA(self, '_Persistent__jar')): # The C implementation forbids deleting the oid # if we're already in a cache. Match its error message raise ValueError('can not change _p_jar of cached object') @@ -536,7 +539,6 @@ def _p_set_changed_flag(self, value): flags &= ~_CHANGED _OSA(self, '_Persistent__flags', flags) - def _p_accessed(self): # Notify the jar's pickle cache that we have been accessed. 
# This relies on what has been (until now) an implementation @@ -560,10 +562,9 @@ def _p_accessed(self): if oid is None: return flags = oga(self, '_Persistent__flags') - if flags is None: # ghost + if flags is None: # ghost return - # The KeyError arises in ZODB: ZODB.serialize.ObjectWriter # can assign a jar and an oid to newly seen persistent objects, # but because they are newly created, they aren't in the @@ -575,7 +576,6 @@ def _p_accessed(self): except (AttributeError, KeyError): pass - def _p_is_in_cache(self, jar=None): oid = _OGA(self, '_Persistent__oid') if not oid: @@ -618,10 +618,11 @@ def __repr__(self): except Exception as e: jar_str = ' in {!r}'.format(e) + cls = self.__class__ return '<{}.{} object at 0x{:x}{}{}{}>'.format( # Match the C name for this exact class - type(self).__module__ if type(self) is not Persistent else 'persistent', - type(self).__name__ if type(self) is not Persistent else 'Persistent', + 'persistent' if cls is Persistent else cls.__module__, + 'Persistent' if cls is Persistent else cls.__name__, id(self), oid_str, jar_str, p_repr_str ) @@ -630,9 +631,9 @@ def __repr__(self): def _estimated_size_in_24_bits(value): if value > 1073741696: return 16777215 - return (value//64) + 1 + return (value // 64) + 1 -# This name is bound by the ``@use_c_impl`` decorator to the class defined above. -# We make sure and list it statically, though, to help out linters. -PersistentPy = PersistentPy # pylint:disable=undefined-variable,self-assigning-variable +# This name is bound by the ``@use_c_impl`` decorator to the class defined +# above. We make sure and list it statically, though, to help out linters. +PersistentPy = PersistentPy # noqa: F821 undefined name 'PersistentPy' diff --git a/src/persistent/picklecache.py b/src/persistent/picklecache.py index 03aa6ce..3172836 100644 --- a/src/persistent/picklecache.py +++ b/src/persistent/picklecache.py @@ -39,7 +39,6 @@ # pylint:disable=protected-access - _OGA = object.__getattribute__ _OSA = object.__setattr__ @@ -59,19 +58,18 @@ def locked(self, *args, **kwargs): class _WeakValueDictionary: - # Maps from OID -> Persistent object, but - # only weakly references the Persistent object. This is similar - # to ``weakref.WeakValueDictionary``, but is customized depending on the - # platform. On PyPy, all objects can cheaply use a WeakRef, so that's - # what we actually use. On CPython, though, ``PersistentPy`` cannot be weakly - # referenced, so we rely on the fact that the ``id()`` of an object is its - # memory location, and we use ``ctypes`` to cast that integer back to - # the object. + # Maps from OID -> Persistent object, but only weakly references the + # Persistent object. This is similar to ``weakref.WeakValueDictionary``, + # but is customized depending on the platform. On PyPy, all objects can + # cheaply use a WeakRef, so that's what we actually use. On CPython, + # though, ``PersistentPy`` cannot be weakly referenced, so we rely on the + # fact that the ``id()`` of an object is its memory location, and we use + # ``ctypes`` to cast that integer back to the object. # - # To remove stale addresses, we rely on the ``ffi.gc()`` object with the exact - # same lifetime as the ``PersistentPy`` object. It calls us, we get the ``id`` - # back out of the CData, and clean up. - if PYPY: # pragma: no cover + # To remove stale addresses, we rely on the ``ffi.gc()`` object with the + # exact same lifetime as the ``PersistentPy`` object. It calls us, we get + # the ``id`` back out of the CData, and clean up. 
+ if PYPY: # pragma: no cover def __init__(self): self._data = WeakValueDictionary() @@ -207,8 +205,14 @@ def __setitem__(self, oid, value): raise TypeError("Cache values must be persistent objects.") value_oid = value._p_oid - if not isinstance(oid, OID_TYPE) or not isinstance(value_oid, OID_TYPE): - raise TypeError('OID must be {}: key={} _p_oid={}'.format(OID_TYPE, oid, value_oid)) + if not isinstance( + oid, + OID_TYPE) or not isinstance( + value_oid, + OID_TYPE): + raise TypeError( + 'OID must be {}: key={} _p_oid={}'.format( + OID_TYPE, oid, value_oid)) if value_oid != oid: raise ValueError("Cache key does not match oid") @@ -227,13 +231,13 @@ def __setitem__(self, oid, value): if jar is None: raise ValueError("Cached object jar missing") # It also requires that it cannot be cached more than one place - existing_cache = getattr(jar, '_cache', None) # type: PickleCache + existing_cache = getattr(jar, '_cache', None) # type: PickleCache if (existing_cache is not None and existing_cache is not self and oid in existing_cache.data): raise ValueError("Cache values may only be in one cache.") - if isinstance(value, type): # ZODB.persistentclass.PersistentMetaClass + if isinstance(value, type): # ZODB.persistentclass.PersistentMetaClass self.persistent_classes[oid] = value else: self.data[oid] = value @@ -271,7 +275,7 @@ def mru(self, oid): # accessess during sweeping, such as with an # overridden _p_deactivate, don't mutate the ring # because that could leave it inconsistent - return False # marker return for tests + return False # marker return for tests value = self.data[oid] @@ -351,7 +355,7 @@ def new_ghost(self, oid, obj): def reify(self, to_reify): """ See IPickleCache. """ - if isinstance(to_reify, OID_TYPE): #bytes + if isinstance(to_reify, OID_TYPE): # bytes to_reify = [to_reify] for oid in to_reify: value = self[oid] @@ -430,24 +434,24 @@ def update_object_size_estimation(self, oid, new_size): @_sweeping_ring def _sweep(self, target, target_size_bytes=0): ejected = 0 - # If we find and eject objects that may have been weak referenced, - # we need to run a garbage collection to try to clear those references. - # Otherwise, it's highly likely that accessing those objects through those - # references will try to ``_p_activate()`` them, and since the jar they came - # from is probably closed, that will lead to an error. See - # https://github.com/zopefoundation/persistent/issues/149 + # If we find and eject objects that may have been weak referenced, we + # need to run a garbage collection to try to clear those references. + # Otherwise, it's highly likely that accessing those objects through + # those references will try to ``_p_activate()`` them, and since the + # jar they came from is probably closed, that will lead to an error. 
+ # See https://github.com/zopefoundation/persistent/issues/149 had_weak_refs = False ring = self.ring for node, value in ring.iteritems(): - if ((target or target_size_bytes) # pylint:disable=too-many-boolean-expressions + if ((target or target_size_bytes) and (not target or self.non_ghost_count <= target) and (self.total_estimated_size <= target_size_bytes or not target_size_bytes)): break if value._p_state == UPTODATE: - # The C implementation will only evict things that are specifically - # in the up-to-date state + # The C implementation will only evict things that are + # specifically in the up-to-date state self._persistent_deactivate_ran = False # sweeping an object out of the cache should also @@ -456,19 +460,20 @@ def _sweep(self, target, target_size_bytes=0): # Also in C, if this was the last reference to the object, # it removes itself from the `data` dictionary. # If we're under PyPy or Jython, we need to run a GC collection - # to make this happen...this is only noticeable though, when - # we eject objects. Also, note that we can only take any of these - # actions if our _p_deactivate ran, in case of buggy subclasses. - # see _persistent_deactivate_ran. + # to make this happen...this is only noticeable though, when we + # eject objects. Also, note that we can only take any of these + # actions if our _p_deactivate ran, in case of buggy + # subclasses. see _persistent_deactivate_ran. if not had_weak_refs: - had_weak_refs |= getattr(value, '__weakref__', None) is not None + had_weak_refs |= getattr( + value, '__weakref__', None) is not None value._p_deactivate() if (self._persistent_deactivate_ran - # Test-cases sneak in non-Persistent objects, sigh, so naturally - # they don't cooperate (without this check a bunch of test_picklecache - # breaks) + # Test-cases sneak in non-Persistent objects, sigh, so + # naturally they don't cooperate (without this check a + # bunch of test_picklecache breaks) or not isinstance(value, self._SWEEPABLE_TYPES)): ring.delete_node(node) ejected += 1 @@ -500,7 +505,7 @@ def _invalidate(self, oid): pass -# This name is bound by the ``@use_c_impl`` decorator to the class defined above. -# We make sure and list it statically, though, to help out linters. -PickleCachePy = PickleCachePy # pylint:disable=undefined-variable,self-assigning-variable +# This name is bound by the ``@use_c_impl`` decorator to the class defined +# above. We make sure and list it statically, though, to help out linters. +PickleCachePy = PickleCachePy # noqa: F821 undefined name 'PickleCachePy' classImplements(PickleCachePy, IExtendedPickleCache) diff --git a/src/persistent/ring.py b/src/persistent/ring.py index 86f35ff..904c809 100644 --- a/src/persistent/ring.py +++ b/src/persistent/ring.py @@ -22,22 +22,23 @@ class IRing(Interface): - """Conceptually, a doubly-linked list for efficiently keeping track of least- - and most-recently used :class:`persistent.interfaces.IPersistent` objects. + """Conceptually, a doubly-linked list for efficiently keeping track of + least- and most-recently used :class:`persistent.interfaces.IPersistent` + objects. This is meant to be used by the :class:`persistent.picklecache.PickleCache` - and should not be considered a public API. This interface documentation exists - to assist development of the picklecache and alternate implementations by - explaining assumptions and performance requirements. + and should not be considered a public API. 
This interface documentation + exists to assist development of the picklecache and alternate + implementations by explaining assumptions and performance requirements. """ - def __len__(): # pylint:disable=no-method-argument + def __len__(): # pylint:disable=no-method-argument """Return the number of persistent objects stored in the ring. Should be constant time. """ - def __contains__(object): # pylint:disable=unexpected-special-method-signature + def __contains__(object): """Answer whether the given persistent object is found in the ring. Must not rely on object equality or object hashing, but only @@ -51,9 +52,9 @@ def add(object): reference to it so it can be deactivated later by the pickle cache. Should be constant time. - The object should not already be in the ring, but this is not necessarily - enforced. - """ + The object should not already be in the ring, but this is not + necessarily enforced. + """ def delete(object): """Remove the object from the ring if it is present. @@ -73,9 +74,9 @@ def move_to_head(object): allowed. """ - def __iter__(): # pylint:disable=no-method-argument - """Iterate over each persistent object in the ring, in the order of least - recently used to most recently used. + def __iter__(): # pylint:disable=no-method-argument + """Iterate over each persistent object in the ring, in the order of + least recently used to most recently used. Mutating the ring while an iteration is in progress has undefined consequences. @@ -90,6 +91,7 @@ def __iter__(): # pylint:disable=no-method-argument _handles = set() + @implementer(IRing) class _CFFIRing: """A ring backed by a C implementation. All operations are constant time. @@ -111,9 +113,9 @@ def __init__(self, cleanup_func=None): # or not any given object is in our ring, plus know how many there are. # In addition, once an object enters the ring, it must be kept alive # so that it can be deactivated. - # Note that because this is a strong reference to the - # persistent object, its cleanup function --- triggered by the ``ffi.gc`` - # object it owns --- will never be fired while it is in this dict. + # Note that because this is a strong reference to the persistent + # object, its cleanup function --- triggered by the ``ffi.gc`` object + # it owns --- will never be fired while it is in this dict. self.ring_to_obj = {} def ring_node_for(self, persistent_object, create=True): @@ -180,10 +182,10 @@ def iteritems(self): pobj = ring_to_obj[current] yield current, pobj - def __iter__(self): for _, v in self.iteritems(): yield v + # Export the best available implementation Ring = _CFFIRing diff --git a/src/persistent/tests/attrhooks.py b/src/persistent/tests/attrhooks.py index a573ae4..6373be4 100644 --- a/src/persistent/tests/attrhooks.py +++ b/src/persistent/tests/attrhooks.py @@ -23,6 +23,7 @@ def _resettingJar(): from persistent.tests.utils import ResettingJar return ResettingJar() + def _rememberingJar(): from persistent.tests.utils import RememberingJar return RememberingJar() @@ -31,18 +32,20 @@ def _rememberingJar(): class OverridesGetattr(Persistent): """Example of overriding __getattr__ """ + def __getattr__(self, name): """Get attributes that can't be gotten the usual way """ # Don't pretend we have any special attributes. 
if name.startswith("__") and name.endswith("__"): - raise AttributeError(name) # pragma: no cover + raise AttributeError(name)  # pragma: no cover return name.upper(), self._p_changed class VeryPrivate(Persistent): """Example of overriding __getattribute__, __setattr__, and __delattr__ """ + def __init__(self, **kw): self.__dict__['__secret__'] = kw.copy() @@ -78,7 +81,6 @@ def __getattribute__(self, name): return meth.__get__(self, self.__class__) - def __setattr__(self, name, value): """Set an attribute value """ diff --git a/src/persistent/tests/cucumbers.py b/src/persistent/tests/cucumbers.py index e6ca7e8..b5e1f68 100644 --- a/src/persistent/tests/cucumbers.py +++ b/src/persistent/tests/cucumbers.py @@ -20,7 +20,8 @@ def print_dict(d): d = sorted(d.items()) print('{%s}' % (', '.join( [('{!r}: {!r}'.format(k, v)) for (k, v) in d] - ))) + ))) + def cmpattrs(self, other, *attrs): result = 0 @@ -32,6 +33,7 @@ def cmpattrs(self, other, *attrs): result += lhs != rhs return result + class Simple(Persistent): def __init__(self, name, **kw): self.__name__ = name diff --git a/src/persistent/tests/test__compat.py b/src/persistent/tests/test__compat.py index cfa4085..e6bf109 100644 --- a/src/persistent/tests/test__compat.py +++ b/src/persistent/tests/test__compat.py @@ -34,7 +34,7 @@ def tearDown(self): if self.env_val is not self: # Reset to what it was to begin with. os.environ['PURE_PYTHON'] = self.env_val - else: # pragma: no cover + else: # pragma: no cover # It wasn't present before, make sure it's not present now. os.environ.pop('PURE_PYTHON', None) @@ -90,7 +90,8 @@ def test_should_attempt(self): ('Yes', False) ): self._set_env(val) - self.assertEqual(expected, compat._should_attempt_c_optimizations()) + self.assertEqual( + expected, compat._should_attempt_c_optimizations()) def test_should_attempt_pypy(self): compat.PYPY = True @@ -103,4 +104,5 @@ def test_should_attempt_pypy(self): ): __traceback_info__ = val self._set_env(val) - self.assertEqual(expected, compat._should_attempt_c_optimizations()) + self.assertEqual( + expected, compat._should_attempt_c_optimizations()) diff --git a/src/persistent/tests/test_list.py b/src/persistent/tests/test_list.py index 62005ee..ed84ac8 100644 --- a/src/persistent/tests/test_list.py +++ b/src/persistent/tests/test_list.py @@ -26,11 +26,14 @@ # pylint:disable=protected-access + class OtherList: def __init__(self, initlist): self.__data = initlist + def __len__(self): return len(self.__data) + def __getitem__(self, i): return self.__data[i] @@ -170,9 +173,9 @@ def mycmp(a, b): # Test __add__, __radd__, __mul__ and __rmul__ - #self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") + # self.assertTrue(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1") self.assertTrue(u1 + [1] == u2, "u1 + [1] == u2") - #self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") + # self.assertTrue([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]") self.assertTrue(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2") self.assertTrue(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2") self.assertTrue(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2") @@ -209,7 +212,6 @@ def mycmp(a, b): eq(u.count(1), 3, "u.count(1) == 3") eq(u.count(2), 0, "u.count(2) == 0") - # Test index eq(u2.index(0), 0, "u2.index(0) == 0") @@ -387,10 +389,3 @@ def test_copy(self): inst = self._makeOne() inst.append(42) copy_test(self, inst) - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) - -if __name__ == '__main__': - unittest.main() diff --git 
a/src/persistent/tests/test_mapping.py b/src/persistent/tests/test_mapping.py index a198c45..36a6983 100644 --- a/src/persistent/tests/test_mapping.py +++ b/src/persistent/tests/test_mapping.py @@ -33,17 +33,19 @@ def _test(inst): raise AssertionError("Must not be caled") descr = self._makeOne(_test) + class Foo: testing = descr self.assertIs(Foo.testing, descr) - def test___get___from_instance(self): _called_with = [] + def _test(inst): _called_with.append(inst) return 'TESTING' descr = self._makeOne(_test) + class Foo: testing = descr foo = Foo() @@ -77,8 +79,8 @@ def test_volatile_attributes_not_persisted(self): def testTheWorld(self): # Test constructors l0 = {} - l1 = {0:0} - l2 = {0:0, 1:1} + l1 = {0: 0} + l2 = {0: 0, 1: 1} u = self._makeOne() u0 = self._makeOne(l0) u1 = self._makeOne(l1) @@ -93,6 +95,7 @@ class OtherMapping(dict): def __init__(self, initmapping): dict.__init__(self) self.__data = initmapping + def items(self): raise AssertionError("Not called") self._makeOne(OtherMapping(u0)) @@ -143,7 +146,7 @@ def mycmp(a, b): eq(u2.get(i), i, "u2.get(i) == i") eq(u2.get(i, 5), i, "u2.get(i, 5) == i") - for i in min(u2)-1, max(u2)+1: + for i in min(u2) - 1, max(u2) + 1: eq(u2.get(i), None, "u2.get(i) == None") eq(u2.get(i, 5), 5, "u2.get(i, 5) == 5") @@ -163,17 +166,17 @@ def mycmp(a, b): # Test __contains__ for i in u2: self.assertTrue(i in u2, "i in u2") - for i in min(u2)-1, max(u2)+1: + for i in min(u2) - 1, max(u2) + 1: self.assertTrue(i not in u2, "i not in u2") # Test update - l = {"a":"b"} - u = self._makeOne(l) + l_ = {"a": "b"} + u = self._makeOne(l_) u.update(u2) for i in u: - self.assertTrue(i in l or i in u2, "i in l or i in u2") - for i in l: + self.assertTrue(i in l_ or i in u2, "i in l or i in u2") + for i in l_: self.assertTrue(i in u, "i in u") for i in u2: self.assertTrue(i in u, "i in u") @@ -321,7 +324,3 @@ def _getTargetClass(self): def test_PD_is_alias_to_PM(self): from persistent.mapping import PersistentMapping self.assertIs(self._getTargetClass(), PersistentMapping) - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/src/persistent/tests/test_persistence.py b/src/persistent/tests/test_persistence.py index 8baf71e..0887b5e 100644 --- a/src/persistent/tests/test_persistence.py +++ b/src/persistent/tests/test_persistence.py @@ -59,20 +59,24 @@ class _Jar: # Set this to a value to have our `setstate` # set the _p_serial of the object setstate_sets_serial = None + def __init__(self): self._loaded = [] self._registered = [] + def setstate(self, obj): self._loaded.append(obj._p_oid) if self.setstate_calls_object is not None: obj.__setstate__(self.setstate_calls_object) if self.setstate_sets_serial is not None: obj._p_serial = self.setstate_sets_serial + def register(self, obj): self._registered.append(obj._p_oid) jar = _Jar() - jar._cache = self._makeRealCache(jar) if real_cache else self._makeCache(jar) + jar._cache = self._makeRealCache( + jar) if real_cache else self._makeCache(jar) return jar def _makeBrokenJar(self): @@ -84,9 +88,11 @@ def _makeBrokenJar(self): class _BrokenJar: def __init__(self): self.called = 0 + def register(self, ob): self.called += 1 raise NotImplementedError() + def setstate(self, ob): raise NotImplementedError() @@ -100,8 +106,9 @@ def _makeOneWithJar(self, klass=None, broken_jar=False, real_cache=False): inst = klass() else: inst = self._makeOne() - jar = self._makeJar(real_cache=real_cache) if not broken_jar else self._makeBrokenJar() - jar._cache.new_ghost(OID, inst) # assigns 
_p_jar, _p_oid + jar = self._makeJar( + real_cache=real_cache) if not broken_jar else self._makeBrokenJar() + jar._cache.new_ghost(OID, inst) # assigns _p_jar, _p_oid # Be sure it really returned a ghost. assert inst._p_status == 'ghost' return inst, jar, OID @@ -119,7 +126,7 @@ def test_instance_conforms_to_IPersistent(self): verifyObject(IPersistent, self._makeOne()) def test_instance_cannot_be_weakly_referenced(self): - if PYPY: # pragma: no cover + if PYPY: # pragma: no cover self.skipTest('On PyPy, everything can be weakly referenced') import weakref inst = self._makeOne() @@ -143,6 +150,7 @@ def test_del_jar_no_jar(self): def test_del_jar_while_in_cache(self): inst, _, OID = self._makeOneWithJar() + def _test(): del inst._p_jar self.assertRaises(ValueError, _test) @@ -189,7 +197,7 @@ def test_assign_p_jar_w_valid_jar(self): inst._p_jar = jar self.assertEqual(inst._p_status, 'saved') self.assertTrue(inst._p_jar is jar) - inst._p_jar = jar # reassign only to same DM + inst._p_jar = jar # reassign only to same DM def test_assign_p_jar_not_in_cache_allowed(self): jar = self._makeJar() @@ -232,6 +240,7 @@ def test_assign_p_oid_w_None_wo_jar(self): def test_assign_p_oid_w_new_oid_w_jar(self): inst, jar, OID = self._makeOneWithJar() new_OID = b'\x02' * 8 + def _test(): inst._p_oid = new_OID self.assertRaises(ValueError, _test) @@ -240,7 +249,7 @@ def test_assign_p_oid_not_in_cache_allowed(self): jar = self._makeJar() inst = self._makeOne() inst._p_jar = jar - inst._p_oid = 1 # anything goes + inst._p_oid = 1 # anything goes inst._p_oid = 42 self.assertEqual(inst._p_oid, 42) @@ -277,24 +286,28 @@ def test_del_oid_like_ZODB_abort(self): def test_assign_p_serial_w_invalid_type(self): inst = self._makeOne() + def _test(): inst._p_serial = object() self.assertRaises(ValueError, _test) def test_assign_p_serial_w_None(self): inst = self._makeOne() + def _test(): inst._p_serial = None self.assertRaises(ValueError, _test) def test_assign_p_serial_too_short(self): inst = self._makeOne() + def _test(): inst._p_serial = b'\x01\x02\x03' self.assertRaises(ValueError, _test) def test_assign_p_serial_too_long(self): inst = self._makeOne() + def _test(): inst._p_serial = b'\x01\x02\x03' * 3 self.assertRaises(ValueError, _test) @@ -369,7 +382,7 @@ def test_assign_p_changed_false_from_ghost(self): inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() inst._p_changed = False - self.assertEqual(inst._p_status, 'ghost') # ??? this is what C does + self.assertEqual(inst._p_status, 'ghost') # ??? 
this is what C does self.assertEqual(list(jar._loaded), []) self.assertEqual(list(jar._registered), []) @@ -384,7 +397,7 @@ def test_assign_p_changed_none_from_saved(self): def test_assign_p_changed_true_from_saved(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX jar._loaded[:] = [] inst._p_changed = True self.assertEqual(inst._p_status, 'changed') @@ -436,7 +449,7 @@ def test_assign_p_changed_false_from_changed(self): def test_assign_p_changed_none_when_sticky(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_changed = None @@ -489,7 +502,7 @@ def test_delete_p_changed_from_changed(self): def test_delete_p_changed_when_sticky(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True del inst._p_changed @@ -499,28 +512,30 @@ def test_delete_p_changed_when_sticky(self): def test_assign_p_sticky_true_when_ghost(self): inst, jar, OID = self._makeOneWithJar() - inst._p_deactivate() # XXX + inst._p_deactivate() # XXX + def _test(): inst._p_sticky = True self.assertRaises(ValueError, _test) def test_assign_p_sticky_false_when_ghost(self): inst, jar, OID = self._makeOneWithJar() - inst._p_deactivate() # XXX + inst._p_deactivate() # XXX + def _test(): inst._p_sticky = False self.assertRaises(ValueError, _test) def test_assign_p_sticky_true_non_ghost(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertTrue(inst._p_sticky) def test_assign_p_sticky_false_non_ghost(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = False self.assertFalse(inst._p_sticky) @@ -549,7 +564,7 @@ def test__p_status_changed_sticky(self): def test__p_status_saved(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_status, 'saved') @@ -610,7 +625,7 @@ def test__p_state_changed_sticky(self): def test__p_state_saved(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False self.assertEqual(inst._p_state, 0) @@ -649,6 +664,7 @@ def test_assign_p_estimated_size_wrong_type(self): def test_assign_p_estimated_size_negative(self): inst = self._makeOne() + def _test(): inst._p_estimated_size = -1 self.assertRaises(ValueError, _test) @@ -677,7 +693,7 @@ def test___getattribute___p__names(self): '_p_estimated_size', '_p_sticky', '_p_status', - ] + ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) for name in NAMES: @@ -737,7 +753,7 @@ class Base: def __getattribute__(self, name): if name == 'magic': return 42 - return super().__getattribute__(name) # pragma: no cover + return super().__getattribute__(name) # pragma: no cover self.assertEqual(getattr(Base(), 'magic'), 42) @@ -756,7 +772,7 @@ def test___setattr___p__names(self): ('_p_serial', SERIAL), ('_p_estimated_size', 0), ('_p_sticky', False), - ] + ] self._clearMRU(jar) for name, value in NAMES: setattr(inst, name, value) @@ -864,7 +880,7 @@ class Derived(self._getTargetClass()): def test___delattr___p__names(self): NAMES = ['_p_changed', '_p_serial', - ] + ] inst, jar, OID = self._makeOneWithJar() self._clearMRU(jar) jar._registered = [] @@ -876,6 +892,7 @@ def 
test___delattr___p__names(self): def test___delattr__normal_name_from_unsaved(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() @@ -889,10 +906,11 @@ class Derived(self._getTargetClass()): inst._p_deactivate() self._clearMRU(jar) jar._registered = [] + def _test(): delattr(inst, 'normal') self.assertRaises(AttributeError, _test) - self.assertEqual(inst._p_status, 'changed') # ??? this is what C does + self.assertEqual(inst._p_status, 'changed') # ??? this is what C does self._checkMRU(jar, [OID]) self.assertEqual(jar._registered, [OID]) self.assertEqual(getattr(inst, 'normal', None), 'before') @@ -903,6 +921,7 @@ def test___delattr__normal_name_from_ghost_real_cache(self): def test___delattr__normal_name_from_saved(self, real_cache=False): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived, real_cache=real_cache) @@ -920,6 +939,7 @@ def test___delattr__normal_name_from_saved_real_cache(self): def test___delattr__normal_name_from_changed(self, real_cache=False): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived, real_cache=real_cache) @@ -959,6 +979,7 @@ class Derived(self._getTargetClass()): def test___getstate___derived_w_slots_in_base_and_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) + class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() @@ -971,6 +992,7 @@ class Derived(Base): def test___getstate___derived_w_slots_in_base_but_not_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) + class Derived(Base): pass inst = Derived() @@ -982,7 +1004,7 @@ class Derived(Base): def test___setstate___empty(self): inst = self._makeOne() - inst.__setstate__(None) # doesn't raise, but doesn't change anything + inst.__setstate__(None) # doesn't raise, but doesn't change anything def test___setstate___nonempty(self): from persistent.persistence import _INITIAL_SERIAL @@ -1021,6 +1043,7 @@ class Derived(self._getTargetClass()): def test___setstate___derived_w_slots_in_base_classes(self): class Base(self._getTargetClass()): __slots__ = ('foo',) + class Derived(Base): __slots__ = ('baz', 'qux',) inst = Derived() @@ -1032,6 +1055,7 @@ class Derived(Base): def test___setstate___derived_w_slots_in_base_but_not_derived(self): class Base(self._getTargetClass()): __slots__ = ('foo',) + class Derived(Base): pass inst = Derived() @@ -1047,7 +1071,8 @@ class Derived(self._getTargetClass()): inst1 = Derived() inst2 = Derived() key1 = 'key' - key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal + key2 = 'ke' + key2 += 'y' # construct in a way that won't intern the literal self.assertFalse(key1 is key2) inst1.__setstate__({key1: 1}) inst2.__setstate__({key2: 2}) @@ -1058,7 +1083,8 @@ class Derived(self._getTargetClass()): inst1 = Derived() inst2 = Derived() key1 = 'key' - key2 = 'ke'; key2 += 'y' # construct in a way that won't intern the literal + key2 = 'ke' + key2 += 'y' # construct in a way that won't intern the literal self.assertFalse(key1 is key2) state1 = IterableUserDict({key1: 1}) state2 = IterableUserDict({key2: 2}) @@ -1125,6 +1151,7 @@ def test___reduce__w_subclass_having_getnewargs_and_getstate(self): class Derived(self._getTargetClass()): def __getnewargs__(self): return ('a', 'b') + def __getstate__(self): return {'foo': 
'bar'} inst = Derived() @@ -1220,7 +1247,7 @@ def test_pickle_roundtrip_w_slots_and_filled_dict(self): def test__p_activate_from_unsaved(self): inst = self._makeOne() - inst._p_activate() # noop w/o jar + inst._p_activate() # noop w/o jar self.assertEqual(inst._p_status, 'unsaved') def test__p_activate_from_ghost(self): @@ -1232,14 +1259,14 @@ def test__p_activate_from_ghost(self): def test__p_activate_from_saved(self): inst, jar, OID = self._makeOneWithJar() inst._p_changed = False - inst._p_activate() # noop from 'saved' state + inst._p_activate() # noop from 'saved' state self.assertEqual(inst._p_status, 'saved') def test__p_activate_only_sets_state_once(self): inst, jar, OID = self._makeOneWithJar() # No matter how many times we call _p_activate, it # only sets state once, the first time - inst._p_invalidate() # make it a ghost + inst._p_invalidate() # make it a ghost self.assertEqual(list(jar._loaded), []) inst._p_activate() @@ -1248,7 +1275,8 @@ def test__p_activate_only_sets_state_once(self): inst._p_activate() self.assertEqual(list(jar._loaded), [OID]) - def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self(self): + def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self( + self): # If the object's __setstate__ set's attributes # when called by p_activate, the state is still # 'saved' when done. Furthemore, the object is not @@ -1256,11 +1284,12 @@ def test__p_activate_leaves_object_in_saved_even_if_object_mutated_self(self): class WithSetstate(self._getTargetClass()): state = None + def __setstate__(self, state): self.state = state inst, jar, OID = self._makeOneWithJar(klass=WithSetstate) - inst._p_invalidate() # make it a ghost + inst._p_invalidate() # make it a ghost self.assertEqual(inst._p_status, 'ghost') jar.setstate_calls_object = 42 @@ -1282,6 +1311,7 @@ def test__p_deactivate_from_unsaved(self): def test__p_deactivate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() @@ -1309,6 +1339,7 @@ def test__p_deactivate_from_saved(self): def test__p_deactivate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) @@ -1348,7 +1379,7 @@ def test__p_deactivate_from_changed_w_dict(self): def test__p_deactivate_when_sticky(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_deactivate() @@ -1364,6 +1395,7 @@ def test__p_invalidate_from_unsaved(self): def test__p_invalidate_from_unsaved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst = Derived() @@ -1392,6 +1424,7 @@ def test__p_invalidate_from_saved(self): def test__p_invalidate_from_saved_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) @@ -1418,6 +1451,7 @@ def test__p_invalidate_from_changed(self): def test__p_invalidate_from_changed_w_dict(self): class Derived(self._getTargetClass()): normal = 'before' + def __init__(self): self.__dict__['normal'] = 'after' inst, jar, OID = self._makeOneWithJar(Derived) @@ -1475,11 +1509,12 @@ def test__p_invalidate_from_changed_w_slots_compat(self): # should be always released. 
class Derived(self._getTargetClass()): __slots__ = ('myattr1', 'myattr2', '__dict__') + def __new__(cls): obj = cls.__base__.__new__(cls) obj.myattr1 = 'value1' obj.myattr2 = 'value2' - obj.foo = 'foo1' # .foo & .bar are in __dict__ + obj.foo = 'foo1' # .foo & .bar are in __dict__ obj.bar = 'bar2' return obj inst, jar, OID = self._makeOneWithJar(Derived) @@ -1518,10 +1553,9 @@ class Derived(self._getTargetClass()): self.assertEqual(inst._p_status, 'ghost') self.assertRaises(NotImplementedError, inst._p_activate) - def test__p_invalidate_from_sticky(self): inst, jar, OID = self._makeOneWithJar() - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True self.assertEqual(inst._p_status, 'sticky') @@ -1535,7 +1569,7 @@ class Derived(self._getTargetClass()): def __init__(self): self.normal = 'value' inst, jar, OID = self._makeOneWithJar(Derived) - inst._p_activate() # XXX + inst._p_activate() # XXX inst._p_changed = False inst._p_sticky = True inst._p_invalidate() @@ -1554,7 +1588,7 @@ def test__p_getattr_w__p__names(self): '_p_estimated_size', '_p_sticky', '_p_status', - ] + ] inst, jar, OID = self._makeOneWithJar() inst._p_deactivate() for name in NAMES: @@ -1603,7 +1637,7 @@ def test__p_setattr_w_normal_name(self): def test__p_delattr_w__p__names(self): NAMES = ['_p_changed', '_p_serial', - ] + ] inst, jar, OID = self._makeOneWithJar() inst._p_changed = True jar._loaded = [] @@ -1640,6 +1674,7 @@ def __init__(self): p._p_jar = self._makeBrokenJar() self.assertEqual(p._p_state, 0) self.assertEqual(p._p_jar.called, 0) + def _try(): p._p_changed = 1 self.assertRaises(NotImplementedError, _try) @@ -1668,6 +1703,7 @@ def test__ancient_dict_layout_bug(self): class P(self._getTargetClass()): def __init__(self): self.x = 0 + def inc(self): self.x += 1 p = P() @@ -1679,12 +1715,16 @@ def inc(self): def test_w_diamond_inheritance(self): class A(self._getTargetClass()): pass + class B(self._getTargetClass()): pass + class C(A, B): pass + class D: pass + class E(D, B): pass # no raise @@ -1693,23 +1733,30 @@ class E(D, B): def test_w_alternate_metaclass(self): class alternateMeta(type): pass + class alternate: __metaclass__ = alternateMeta + class mixedMeta(alternateMeta, type): pass # no raise + class mixed1(alternate, self._getTargetClass()): pass + class mixed2(self._getTargetClass(), alternate): pass def test_setattr_in_subclass_is_not_called_creating_an_instance(self): class subclass(self._getTargetClass()): _v_setattr_called = False + def __setattr__(self, name, value): raise AssertionError("Should not be called") inst = subclass() - self.assertEqual(object.__getattribute__(inst, '_v_setattr_called'), False) + self.assertEqual( + object.__getattribute__( + inst, '_v_setattr_called'), False) def test_can_set__p_attrs_if_subclass_denies_setattr(self): # ZODB defines a PersistentBroken subclass that only lets us @@ -1734,19 +1781,20 @@ def __setattr__(self, name, value): def test_p_invalidate_calls_p_deactivate(self): class P(self._getTargetClass()): deactivated = False + def _p_deactivate(self): self.deactivated = True p = P() p._p_invalidate() self.assertTrue(p.deactivated) - def test_new_ghost_success_not_already_ghost_dict(self): # https://github.com/zopefoundation/persistent/issues/49 # calling new_ghost on an object that already has state just changes # its flags, it doesn't destroy the state. 
from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE + class TestPersistent(self._getTargetClass()): pass KEY = b'123' @@ -1769,6 +1817,7 @@ def test_new_ghost_success_not_already_ghost_slot(self): # its flags, it doesn't destroy the state. from persistent.interfaces import GHOST from persistent.interfaces import UPTODATE + class TestPersistent(self._getTargetClass()): __slots__ = ('set_by_new', '__weakref__') KEY = b'123' @@ -1802,7 +1851,8 @@ def _normalized_repr(self, o): def test_repr_no_oid_no_jar(self): p = self._makeOne() result = self._normalized_repr(p) - self.assertEqual(result, '') + self.assertEqual( + result, '') def test_repr_no_oid_in_jar(self): p = self._makeOne() @@ -1825,7 +1875,9 @@ def test_repr_oid_no_jar(self): result = self._normalized_repr(p) self.assertEqual( result, - "") + "") def test_64bit_oid(self): import struct @@ -1839,8 +1891,8 @@ def test_64bit_oid(self): result = self._normalized_repr(p) self.assertEqual( result, - '' - ) + '') def test_repr_no_oid_repr_jar_raises_exception(self): p = self._makeOne() @@ -1854,8 +1906,8 @@ def __repr__(self): result = self._normalized_repr(p) self.assertEqual( result, - "") - + "") def test_repr_oid_raises_exception_no_jar(self): p = self._makeOne() @@ -1870,7 +1922,9 @@ def __repr__(self): result = self._normalized_repr(p) self.assertEqual( result, - "") + "") # Anything other than 8 bytes, though, we do. p._p_oid = BadOID(b'1234567') @@ -1878,8 +1932,8 @@ def __repr__(self): result = self._normalized_repr(p) self.assertEqual( result, - "") - + "") def test_repr_oid_and_jar_raise_exception(self): p = self._makeOne() @@ -1895,12 +1949,11 @@ def __repr__(self): p._p_jar = Jar() - result = self._normalized_repr(p) self.assertEqual( result, - "") + "") def test_repr_no_oid_repr_jar_raises_baseexception(self): p = self._makeOne() @@ -1942,7 +1995,9 @@ def __repr__(self): result = self._normalized_repr(p) self.assertEqual( result, - ">") + ">") def test__p_repr(self): class P(self._getTargetClass()): @@ -1966,8 +2021,9 @@ def _p_repr(self): result = self._normalized_repr(p) self.assertEqual( result, - "") + "") class Jar: def __repr__(self): @@ -1977,8 +2033,9 @@ def __repr__(self): result = self._normalized_repr(p) self.assertEqual( result, - " _p_repr Exception('_p_repr failed')>") + " _p_repr Exception('_p_repr failed')>") def test__p_repr_in_instance_ignored(self): class P(self._getTargetClass()): @@ -1986,8 +2043,9 @@ class P(self._getTargetClass()): p = P() p._p_repr = lambda: "Instance" result = self._normalized_repr(p) - self.assertEqual(result, - '') + self.assertEqual( + result, + '') def test__p_repr_baseexception(self): class P(self._getTargetClass()): @@ -1997,11 +2055,12 @@ def _p_repr(self): with self.assertRaisesRegex(BaseException, '_p_repr failed'): repr(p) + class PyPersistentTests(unittest.TestCase, _Persistent_Base): def _getTargetClass(self): from persistent.persistence import PersistentPy - assert PersistentPy.__module__ == 'persistent.persistence', PersistentPy.__module__ + self.assertEqual(PersistentPy.__module__, 'persistent.persistence') return PersistentPy def _makeCache(self, jar): @@ -2011,8 +2070,10 @@ def __init__(self, jar): self._jar = jar self._mru = [] self._data = {} + def mru(self, oid): self._mru.append(oid) + def new_ghost(self, oid, obj): obj._p_jar = self._jar obj._p_oid = oid @@ -2025,10 +2086,13 @@ def new_ghost(self, oid, obj): # flags. 
object.__setattr__(obj, '_Persistent__flags', None) self._data[oid] = obj + def get(self, oid): return self._data.get(oid) + def __delitem__(self, oid): del self._data[oid] + def update_object_size_estimation(self, oid, new_size): pass @@ -2076,12 +2140,15 @@ def mru(oid): def test_accessed_invalidated_with_jar_and_oid_but_no_cache(self): # This scenario arises in ZODB tests where the jar is faked KEY = b'123' + class Jar: accessed = False + def __getattr__(self, name): if name == '_cache': self.accessed = True raise AttributeError(name) + def register(self, *args): pass c1 = self._makeOne() @@ -2098,7 +2165,7 @@ def register(self, *args): self.assertTrue(c1._p_jar.accessed) c1._p_jar.accessed = False - c1._Persistent__flags = None # coverage + c1._Persistent__flags = None # coverage c1._p_invalidate_deactivate_helper() self.assertTrue(c1._p_jar.accessed) @@ -2133,10 +2200,10 @@ def _getTargetClass(self): return get_c()['persistent.persistence'].Persistent def _checkMRU(self, jar, value): - pass # Figure this out later + pass # Figure this out later def _clearMRU(self, jar): - pass # Figure this out later + pass # Figure this out later def _makeCache(self, jar): from persistent._compat import _c_optimizations_available as get_c @@ -2160,5 +2227,6 @@ def test_w_type(self): for typ in TO_CREATE: self.assertTrue(isinstance(self._callFUT(typ), typ)) + def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/src/persistent/tests/test_picklecache.py b/src/persistent/tests/test_picklecache.py index 7fd1150..c3a860a 100644 --- a/src/persistent/tests/test_picklecache.py +++ b/src/persistent/tests/test_picklecache.py @@ -24,6 +24,7 @@ _marker = object() + class DummyPersistent: _Persistent__ring = None @@ -48,12 +49,13 @@ class ClosedConnection(DummyConnection): def __init__(self, test): self.test = test - def setstate(self, obj): # pragma: no cover + def setstate(self, obj): # pragma: no cover self.test.fail("Connection is closed") def register(self, obj): """Does nothing.""" + def _len(seq): return len(list(seq)) @@ -62,8 +64,10 @@ class PickleCacheTestMixin: def _getTargetClass(self): from persistent.picklecache import PickleCachePy as BasePickleCache + class PickleCache(BasePickleCache): - _CACHEABLE_TYPES = BasePickleCache._CACHEABLE_TYPES + (DummyPersistent,) + _CACHEABLE_TYPES = BasePickleCache._CACHEABLE_TYPES + \ + (DummyPersistent,) return PickleCache def _getTargetInterface(self): @@ -188,7 +192,6 @@ def test___setitem___mismatch_key_oid(self): with self.assertRaises(ValueError): cache[KEY] = uptodate - def test___setitem___non_ghost(self): KEY = b'uptodate' cache = self._makeOne() @@ -209,6 +212,7 @@ def test___setitem___non_ghost(self): def test___setitem___persistent_class(self): KEY = b'pclass' + class pclass: _p_oid = KEY _p_jar = DummyConnection() @@ -242,6 +246,7 @@ def test___delitem___w_persistent_class(self): KEY = b'pclass' cache = self._makeOne() + class pclass: _p_oid = KEY _p_jar = DummyConnection() @@ -304,7 +309,6 @@ def test_lruitems(self): self.assertEqual(items[1][0], TWO) self.assertEqual(items[2][0], THREE) - def _numbered_oid(self, i): return b'oid_%04d' % i @@ -326,7 +330,7 @@ def test_incrgc_simple(self): self.assertEqual(cache.cache_non_ghost_count, 100) cache.incrgc() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 10) items = cache.lru_items() @@ -376,14 +380,15 @@ def test_full_sweep(self): 
self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) for oid in oids: self.assertTrue(cache.get(oid) is None) - def test_full_sweep_clears_weakrefs_in_interface(self, sweep_method='full_sweep'): + def test_full_sweep_clears_weakrefs_in_interface( + self, sweep_method='full_sweep'): # https://github.com/zopefoundation/persistent/issues/149 # Sweeping the cache clears weak refs (for PyPy especially) # In the real world, this shows up in the interaction with @@ -424,11 +429,13 @@ class P(self._getRealPersistentClass()): Interface.changed(None) def test_incrgc_clears_weakrefs_in_interface(self): - self.test_full_sweep_clears_weakrefs_in_interface(sweep_method='incrgc') + self.test_full_sweep_clears_weakrefs_in_interface( + sweep_method='incrgc') def test_full_sweep_clears_weakrefs(self, sweep_method='incrgc'): # like test_full_sweep_clears_weakrefs_in_interface, - # but directly using a weakref. This is the simplest version of the test. + # but directly using a weakref. This is the simplest version of the + # test. from weakref import ref as WeakRef gc.disable() self.addCleanup(gc.enable) @@ -468,7 +475,7 @@ def test_minimize(self): self.assertEqual(cache.cache_non_ghost_count, 100) cache.minimize() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) @@ -484,7 +491,7 @@ def test_minimize_turns_into_ghosts(self): self.assertEqual(cache.cache_non_ghost_count, 1) cache.minimize() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 0) @@ -549,6 +556,7 @@ def test_new_ghost_success_not_already_ghost(self): def test_new_ghost_w_pclass_non_ghost(self): KEY = b'123' + class Pclass: _p_oid = None _p_jar = None @@ -560,6 +568,7 @@ class Pclass: def test_new_ghost_w_pclass_ghost(self): KEY = b'123' + class Pclass: _p_oid = None _p_jar = None @@ -572,13 +581,13 @@ class Pclass: def test_invalidate_miss_single(self): KEY = b'123' cache = self._makeOne() - cache.invalidate(KEY) # doesn't raise + cache.invalidate(KEY) # doesn't raise def test_invalidate_miss_multiple(self): KEY = b'123' KEY2 = b'456' cache = self._makeOne() - cache.invalidate([KEY, KEY2]) # doesn't raise + cache.invalidate([KEY, KEY2]) # doesn't raise def test_invalidate_hit_single_non_ghost(self): from persistent.interfaces import GHOST @@ -614,6 +623,7 @@ def test_invalidate_hit_multiple_non_ghost(self): def test_debug_info_w_persistent_class(self): KEY = b'pclass' + class pclass: _p_oid = KEY _p_jar = DummyConnection() @@ -621,7 +631,7 @@ class pclass: pclass._p_state = UPTODATE cache[KEY] = pclass - gc.collect() # pypy vs. refcounting + gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) @@ -641,7 +651,7 @@ def test_debug_info_w_normal_object(self): uptodate = self._makePersist(state=UPTODATE, oid=KEY) cache[KEY] = uptodate - gc.collect() # pypy vs. refcounting + gc.collect() # pypy vs. 
refcounting info = cache.debug_info() self.assertEqual(len(info), 1) @@ -653,7 +663,6 @@ def test_debug_info_w_normal_object(self): self.assertEqual(typ, type(uptodate).__name__) return uptodate, info[0] - def test_debug_info_w_ghost(self): from persistent.interfaces import GHOST @@ -662,7 +671,7 @@ def test_debug_info_w_ghost(self): ghost = self._makePersist(state=GHOST, oid=KEY) cache[KEY] = ghost - gc.collect() # pypy vs. refcounting + gc.collect() # pypy vs. refcounting info = cache.debug_info() self.assertEqual(len(info), 1) @@ -676,8 +685,8 @@ def test_debug_info_w_ghost(self): def test_setting_non_persistent_item(self): cache = self._makeOne() - with self.assertRaisesRegex(TypeError, - "Cache values must be persistent objects."): + with self.assertRaisesRegex( + TypeError, "Cache values must be persistent objects."): cache[b'12345678'] = object() def test_setting_without_jar(self): @@ -714,15 +723,16 @@ def test_sweep_empty(self): def test_invalidate_persistent_class_calls_p_invalidate(self): KEY = b'pclass' + class pclass: _p_oid = KEY _p_jar = DummyConnection() invalidated = False + @classmethod def _p_invalidate(cls): cls.invalidated = True - cache = self._makeOne(pclass._p_jar) cache[KEY] = pclass @@ -758,13 +768,12 @@ def test_sweep_of_non_deactivating_object(self): cache = self._makeOne(jar) p = self._makePersist(jar=jar) - p._p_state = 0 # non-ghost, get in the ring + p._p_state = 0 # non-ghost, get in the ring cache[p._p_oid] = p def bad_deactivate(): "Doesn't call super, for it's own reasons, so can't be ejected" - p._p_deactivate = bad_deactivate cache._SWEEPABLE_TYPES = DummyPersistent @@ -947,6 +956,7 @@ def test_mru_last(self): def test_invalidate_hit_pclass(self): KEY = b'123' + class Pclass: _p_oid = KEY _p_jar = DummyConnection() @@ -978,7 +988,7 @@ def test_full_sweep_w_sticky(self): self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) @@ -994,7 +1004,7 @@ def test_full_sweep_w_changed(self): self.assertEqual(cache.cache_non_ghost_count, 100) cache.full_sweep() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.cache_non_ghost_count, 1) @@ -1007,6 +1017,7 @@ def test_init_with_cacheless_jar(self): # have a _cache class Jar: was_set = False + def __setattr__(self, name, value): if name == '_cache': object.__setattr__(self, 'was_set', True) @@ -1083,10 +1094,10 @@ def test_cache_garbage_collection_bytes_also_deactivates_object(self): class MyPersistent(self._getDummyPersistentClass()): def _p_deactivate(self): - # mimic what the real persistent object does to update the cache - # size; if we don't get deactivated by sweeping, the cache size - # won't shrink so this also validates that _p_deactivate gets - # called when ejecting an object. + # mimic what the real persistent object does to update the + # cache size; if we don't get deactivated by sweeping, the + # cache size won't shrink so this also validates that + # _p_deactivate gets called when ejecting an object. 
cache.update_object_size_estimation(self._p_oid, -1) cache = self._makeOne() @@ -1095,9 +1106,11 @@ def _p_deactivate(self): for i in range(100): oid = self._numbered_oid(i) oids.append(oid) - o = cache[oid] = self._makePersist(oid=oid, kind=MyPersistent, state=UPTODATE) + o = cache[oid] = self._makePersist( + oid=oid, kind=MyPersistent, state=UPTODATE) - o._Persistent__size = 0 # must start 0, ZODB sets it AFTER updating the size + # must start 0, ZODB sets it AFTER updating the size + o._Persistent__size = 0 cache.update_object_size_estimation(oid, 64) o._Persistent__size = 2 @@ -1123,11 +1136,10 @@ def _p_deactivate(self): # It also shrank the measured size of the cache, # though this may require a GC to be visible. - if PYPY: # pragma: no cover + if PYPY: # pragma: no cover gc.collect() self.assertEqual(len(cache), 1) - def test_new_ghost_obj_already_in_cache(self): base_result = super().test_new_ghost_obj_already_in_cache() cache, key, candidate = base_result @@ -1165,37 +1177,36 @@ def _p_deactivate(self): o._Persistent__size = 0 cache.update_object_size_estimation(oid, 1) o._Persistent__size = 1 - del o # leave it only in the cache + del o # leave it only in the cache self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(cache.total_estimated_size, 64 * 100) cache.incrgc() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 64 * 6) self.assertEqual(cache.cache_non_ghost_count, 6) self.assertEqual(len(cache), 6) cache.full_sweep() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(len(cache), 0) def test_interpreter_finalization_ffi_cleanup(self): - # When the interpreter is busy garbage collecting old objects - # and clearing their __dict__ in random orders, the CFFI cleanup - # ``ffi.gc()`` cleanup hooks we use on CPython don't - # raise errors. + # When the interpreter is busy garbage collecting old objects and + # clearing their __dict__ in random orders, the CFFI cleanup + # ``ffi.gc()`` cleanup hooks we use on CPython don't raise errors. # - # Prior to Python 3.8, when ``sys.unraisablehook`` was added, - # the only way to know if this test fails is to look for AttributeError - # on stderr. + # Prior to Python 3.8, when ``sys.unraisablehook`` was added, the only + # way to know if this test fails is to look for AttributeError on + # stderr. # - # But wait, it gets worse. Prior to https://foss.heptapod.net/pypy/cffi/-/issues/492 - # (CFFI > 1.14.5, unreleased at this writing), CFFI ignores - # ``sys.unraisablehook``, so even on 3.8 the only way to know - # a failure is to watch stderr. + # But wait, it gets worse. Prior to + # https://foss.heptapod.net/pypy/cffi/-/issues/492 (CFFI > 1.14.5, + # unreleased at this writing), CFFI ignores ``sys.unraisablehook``, so + # even on 3.8 the only way to know a failure is to watch stderr. 
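On Python 3.8+ the pattern this comment block describes — capturing otherwise-unraisable errors from finalizers by temporarily swapping in ``sys.unraisablehook`` — can be shown in isolation. A hedged sketch (the ``Broken`` class and its ``AttributeError`` are invented; errors escaping CFFI's ``ffi.gc()`` cleanup hooks are the real case the test cares about):

    import gc
    import sys
    import unittest

    class UnraisableCaptureExample(unittest.TestCase):
        def test_finalizer_errors_are_captured(self):
            unraised = []
            old_hook = sys.unraisablehook          # Python 3.8+
            sys.unraisablehook = unraised.append
            self.addCleanup(setattr, sys, 'unraisablehook', old_hook)

            class Broken:
                def __del__(self):
                    # Normally this would only be printed to stderr.
                    raise AttributeError("boom")

            Broken()          # create and immediately drop an instance
            gc.collect()      # for implementations without refcounting
            # Each captured entry carries .exc_type, .exc_value, .object, ...
            self.assertTrue(
                any(e.exc_type is AttributeError for e in unraised))

    if __name__ == '__main__':
        unittest.main()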
# # See https://github.com/zopefoundation/persistent/issues/150 @@ -1205,13 +1216,15 @@ def test_interpreter_finalization_ffi_cleanup(self): old_hook = sys.unraisablehook except AttributeError: pass - else: # pragma: no cover + else: # pragma: no cover sys.unraisablehook = unraised.append self.addCleanup(setattr, sys, 'unraisablehook', old_hook) cache = self._makeOne() oid = self._numbered_oid(42) - o = cache[oid] = self._makePersist(oid=oid) + # local variable 'o' is assigned to but never used, but needed for the + # test to succeed. + o = cache[oid] = self._makePersist(oid=oid) # noqa: F841 # Clear the dict, or at least part of it. # This is coupled to ``cleanup_hook`` if cache.data.cleanup_hook: @@ -1264,7 +1277,7 @@ def register(self, obj): pass dummy_connection = DummyConnection() - dummy_connection.register(1) # for coveralls + dummy_connection.register(1) # for coveralls def makePersistent(oid): persist = self._getDummyPersistentClass()() @@ -1285,7 +1298,7 @@ def makePersistent(oid): o = cache[oid] = makePersistent(oid) cache.update_object_size_estimation(oid, 1) o._p_estimated_size = 1 - del o # leave it only in the cache + del o # leave it only in the cache self.assertEqual(cache.cache_non_ghost_count, 100) self.assertEqual(cache.total_estimated_size, 64 * 100) @@ -1296,7 +1309,7 @@ def makePersistent(oid): self.assertEqual(len(cache), 6) cache.full_sweep() - gc.collect() # banish the ghosts who are no longer in the ring + gc.collect() # banish the ghosts who are no longer in the ring self.assertEqual(cache.total_estimated_size, 0) self.assertEqual(cache.cache_non_ghost_count, 0) self.assertEqual(len(cache), 0) @@ -1313,8 +1326,9 @@ def _makeOne(self): @unittest.skipIf(PYPY, "PyPy doesn't have the cleanup_hook") def test_cleanup_hook_gc(self): - # A more targeted test than ``test_interpreter_finalization_ffi_cleanup`` - # See https://github.com/zopefoundation/persistent/issues/150 + # A more targeted test than + # ``test_interpreter_finalization_ffi_cleanup`` See + # https://github.com/zopefoundation/persistent/issues/150 wvd = self._makeOne() class cdata: @@ -1329,5 +1343,6 @@ class cdata: def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) + if __name__ == '__main__': unittest.main() diff --git a/src/persistent/tests/test_ring.py b/src/persistent/tests/test_ring.py index efa2810..81005eb 100644 --- a/src/persistent/tests/test_ring.py +++ b/src/persistent/tests/test_ring.py @@ -31,7 +31,7 @@ def _next_oid(cls): def __init__(self): self._p_oid = self._next_oid() - def __repr__(self): # pragma: no cover + def __repr__(self): # pragma: no cover return "".format(self._p_oid, id(self)) @@ -118,7 +118,6 @@ def test_move_to_head(self): self.assertEqual(3, len(r)) self.assertEqual([p1, p2, p3], list(r)) - r.move_to_head(p1) self.assertEqual([p2, p3, p1], list(r)) diff --git a/src/persistent/tests/test_timestamp.py b/src/persistent/tests/test_timestamp.py index 91aa0da..8ab5083 100644 --- a/src/persistent/tests/test_timestamp.py +++ b/src/persistent/tests/test_timestamp.py @@ -65,7 +65,7 @@ def test_ctor_invalid_arglist(self): ('1', '2', '3', '4', '5', '6'), (1, 2, 3, 4, 5, 6, 7), (b'123',), - ] + ] for args in BAD_ARGS: with self.assertRaises((TypeError, ValueError)): self._makeOne(*args) @@ -78,7 +78,7 @@ def test_ctor_from_invalid_strings(self): '\x00' * 4, '\x00' * 5, '\x00' * 7, - ] + ] for args in BAD_ARGS: self.assertRaises((TypeError, ValueError), self._makeOne, *args) @@ -226,7 +226,8 @@ def __init__(self): self.c_int64 = ctypes.c_int64 # win32, even 
on 64-bit long, has funny sizes self.is_32_bit_hash = self.c_int32 == ctypes.c_long - self.expected_hash = self.bit_32_hash if self.is_32_bit_hash else self.bit_64_hash + self.expected_hash = ( + self.bit_32_hash if self.is_32_bit_hash else self.bit_64_hash) @contextmanager def _use_hash(self, maxint, c_long): @@ -239,7 +240,6 @@ def _use_hash(self, maxint, c_long): self.MUT._MAXINT = self.orig_maxint self.MUT.c_long = self.orig_c_long - def use_32bit(self): return self._use_hash(self.MAX_32_BITS, self.c_int32) @@ -293,7 +293,8 @@ def _make_many_instants(self): # it to test matching yield Instant.now_ts_args for i in range(2000): - yield Instant.now_ts_args[:-1] + (Instant.now_ts_args[-1] + (i % 60.0)/100.0, ) + yield Instant.now_ts_args[:-1] + ( + Instant.now_ts_args[-1] + (i % 60.0) / 100.0, ) def _makeC(self, *args, **kwargs): from persistent._compat import _c_optimizations_available as get_c @@ -426,5 +427,6 @@ def test_seconds_precision_half(self): self.test_seconds_precision(seconds=6.55555555) self.test_seconds_precision(seconds=6.555555555) + def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/src/persistent/tests/test_wref.py b/src/persistent/tests/test_wref.py index e246b27..3a34bdf 100644 --- a/src/persistent/tests/test_wref.py +++ b/src/persistent/tests/test_wref.py @@ -42,7 +42,7 @@ def test_ctor_target_w_jar(self): def test___call___target_in_volatile(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() wref = self._makeOne(target) self.assertTrue(wref() is target) @@ -56,20 +56,20 @@ def test___call___target_in_jar(self): def test___call___target_not_in_jar(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() wref = self._makeOne(target) del wref._v_ob self.assertTrue(wref() is None) def test___hash___w_target(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() wref = self._makeOne(target) self.assertEqual(hash(wref), hash(target)) def test___hash___wo_target(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() wref = self._makeOne(target) del wref._v_ob self.assertRaises(TypeError, hash, wref) @@ -84,7 +84,7 @@ def test___eq___w_non_weakref(self): def test___eq___w_both_same_target(self): target = _makeTarget() lhs = self._makeOne(target) - rhs_target = _makeTarget() + _makeTarget() rhs = self._makeOne(target) self.assertEqual(lhs, rhs) @@ -97,7 +97,7 @@ def test___eq___w_both_different_targets(self): def test___eq___w_lhs_gone_target_not_in_jar(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() lhs = self._makeOne(target) del lhs._v_ob rhs = self._makeOne(target) @@ -109,13 +109,13 @@ def test___eq___w_lhs_gone_target_in_jar(self): jar[target._p_oid] = target lhs = self._makeOne(target) del lhs._v_ob - rhs_target = _makeTarget() + _makeTarget() rhs = self._makeOne(target) self.assertEqual(lhs, rhs) def test___eq___w_rhs_gone_target_not_in_jar(self): target = _makeTarget() - target._p_jar = jar = _makeJar() + target._p_jar = _makeJar() lhs = self._makeOne(target) rhs = self._makeOne(target) del rhs._v_ob @@ -192,12 +192,12 @@ def test___setstate___empty(self): value = jar[VALUE] = _makeTarget(oid=VALUE) value._p_jar = jar key2 = _makeTarget(oid=KEY2) - key2._p_jar = jar # not findable + key2._p_jar = jar # not findable kref2 = WeakRef(key2) del kref2._v_ob # force a miss value2 = jar[VALUE2] = _makeTarget(oid=VALUE2) 
value2._p_jar = jar - key3 = jar[KEY3] = _makeTarget(oid=KEY3) # findable + key3 = jar[KEY3] = _makeTarget(oid=KEY3) # findable key3._p_jar = jar kref3 = WeakRef(key3) del kref3._v_ob # force a miss, but win in the lookup @@ -227,6 +227,7 @@ def test___getitem___miss(self): value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) + def _try(): return pwkd[key] self.assertRaises(KeyError, _try) @@ -248,6 +249,7 @@ def test___delitem___miss(self): value = jar['value'] = _makeTarget(oid='VALUE') value._p_jar = jar pwkd = self._makeOne(None) + def _try(): del pwkd[key] self.assertRaises(KeyError, _try) @@ -278,7 +280,7 @@ def test___contains___hit(self): self.assertTrue(key in pwkd) def test___iter___empty(self): - jar = _makeJar() + _makeJar() pwkd = self._makeOne(None) self.assertEqual(list(pwkd), []) @@ -316,23 +318,29 @@ def test_update_w_dict(self): def _makeTarget(oid=b'OID'): from persistent import Persistent + class Derived(Persistent): def __hash__(self): return hash(self._p_oid) + def __eq__(self, other): return self._p_oid == other._p_oid - def __repr__(self): # pragma: no cover + + def __repr__(self): # pragma: no cover return 'Derived: %s' % self._p_oid derived = Derived() derived._p_oid = oid return derived + def _makeJar(): class _DB: database_name = 'testing' + class _Jar(dict): - db = lambda self: _DB() + def db(self): return _DB() return _Jar() + def test_suite(): return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/src/persistent/tests/utils.py b/src/persistent/tests/utils.py index 49cb350..18ecd3b 100644 --- a/src/persistent/tests/utils.py +++ b/src/persistent/tests/utils.py @@ -7,9 +7,11 @@ class TrivialJar: def register(self, ob): """Does nothing""" + class ResettingJar: """Testing stub for _p_jar attribute. """ + def __init__(self): from zope.interface import directlyProvides @@ -27,7 +29,6 @@ def add(self, obj): obj._p_jar = self self.cache[obj._p_oid] = obj - # the following methods must be implemented to be a jar def setstate(self, obj): @@ -36,9 +37,11 @@ def setstate(self, obj): # but it suffices for the tests. obj.__class__.__init__(obj) + class RememberingJar: """Testing stub for _p_jar attribute. """ + def __init__(self): from persistent import PickleCache # XXX stub it! self.cache = PickleCache(self) @@ -55,7 +58,6 @@ def add(self, obj): self.obj = obj self.remembered = obj.__getstate__() - def fake_commit(self): self.remembered = self.obj.__getstate__() self.obj._p_changed = 0 @@ -102,7 +104,8 @@ def skipIfNoCExtension(o): from persistent._compat import _c_optimizations_ignored from persistent._compat import _should_attempt_c_optimizations - if _should_attempt_c_optimizations() and not _c_optimizations_available(): # pragma: no cover + if _should_attempt_c_optimizations( + ) and not _c_optimizations_available(): # pragma: no cover return unittest.expectedFailure(o) return unittest.skipIf( _c_optimizations_ignored() or not _c_optimizations_available(), diff --git a/src/persistent/timestamp.py b/src/persistent/timestamp.py index d9e6deb..5266b43 100644 --- a/src/persistent/timestamp.py +++ b/src/persistent/timestamp.py @@ -37,7 +37,7 @@ # Make sure to overflow and wraparound just # like the C code does. 
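To make that overflow behaviour concrete: on a build where a C ``long`` is 64 bits (i.e. not win32), ``ctypes.c_long(x).value`` and the pure bit-arithmetic fallback defined in the next hunk produce the same wrapped value. A small worked example (the value of ``x`` is arbitrary):

    import ctypes

    MAXINT = (1 << 63) - 1          # sys.maxsize on a 64-bit build

    def wrap_bits(x):
        # Same arithmetic as the _wraparound() fallback below.
        return int(((x + (MAXINT + 1)) & ((MAXINT << 1) + 1)) - (MAXINT + 1))

    x = (1 << 63) + 5               # too big for a signed 64-bit C long
    print(ctypes.c_long(x).value)   # -9223372036854775803
    print(wrap_bits(x))             # -9223372036854775803, identical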
from ctypes import c_long -except ImportError: # pragma: no cover +except ImportError: # pragma: no cover # XXX: This is broken on 64-bit windows, where # sizeof(long) != sizeof(Py_ssize_t) # sizeof(long) == 4, sizeof(Py_ssize_t) == 8 @@ -45,8 +45,10 @@ # win32 platforms, but then that breaks PyPy3 64 bit for an unknown # reason. c_long = None + def _wraparound(x): - return int(((x + (_MAXINT + 1)) & ((_MAXINT << 1) + 1)) - (_MAXINT + 1)) + return int(((x + (_MAXINT + 1)) & ((_MAXINT << 1) + 1)) + - (_MAXINT + 1)) else: def _wraparound(x): return c_long(x).value @@ -57,22 +59,26 @@ def _UTC(): def _makeUTC(y, mo, d, h, mi, s): - s = round(s, 6) # microsecond precision, to match the C implementation + s = round(s, 6) # microsecond precision, to match the C implementation usec, sec = math.modf(s) sec = int(sec) usec = int(usec * 1e6) return datetime.datetime(y, mo, d, h, mi, sec, usec, tzinfo=_UTC()) + _EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0) -_TS_SECOND_BYTES_BIAS = 60.0 / (1<<16) / (1<<16) +_TS_SECOND_BYTES_BIAS = 60.0 / (1 << 16) / (1 << 16) + def _makeRaw(year, month, day, hour, minute, second): a = (((year - 1900) * 12 + month - 1) * 31 + day - 1) a = (a * 24 + hour) * 60 + minute - b = int(second / _TS_SECOND_BYTES_BIAS) # Don't round() this; the C version just truncates + # Don't round() this; the C version just truncates + b = int(second / _TS_SECOND_BYTES_BIAS) return struct.pack('>II', a, b) + def _parseRaw(octets): a, b = struct.unpack('>II', octets) minute = a % 60 @@ -84,7 +90,6 @@ def _parseRaw(octets): return (year, month, day, hour, minute, second) - @use_c_impl @functools.total_ordering class TimeStamp: @@ -100,11 +105,12 @@ def __init__(self, *args): raise TypeError('Raw must be 8 octets') self._raw = raw elif len(args) == 6: - self._raw = _makeRaw(*args) # pylint:disable=no-value-for-parameter - # Note that we don't preserve the incoming arguments in self._elements, - # we derive them from the raw value. This is because the incoming - # seconds value could have more precision than would survive - # in the raw data, so we must be consistent. + self._raw = _makeRaw( + *args) # pylint:disable=no-value-for-parameter + # Note that we don't preserve the incoming arguments in + # self._elements, we derive them from the raw value. This is + # because the incoming seconds value could have more precision than + # would survive in the raw data, so we must be consistent. else: raise TypeError('Pass either a single 8-octet arg ' 'or 5 integers and a float') @@ -185,7 +191,7 @@ def __hash__(self): x = _wraparound(x) - if x == -1: # pragma: no cover + if x == -1: # pragma: no cover # The C version has this condition, but it's not clear # why; it's also not immediately obvious what bytestring # would generate this---hence the no-cover @@ -199,6 +205,6 @@ def __lt__(self, other): return NotImplemented -# This name is bound by the ``@use_c_impl`` decorator to the class defined above. -# We make sure and list it statically, though, to help out linters. -TimeStampPy = TimeStampPy # pylint:disable=undefined-variable,self-assigning-variable +# This name is bound by the ``@use_c_impl`` decorator to the class defined +# above. We make sure and list it statically, though, to help out linters. 
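As a usage note for the raw format handled above (``_makeRaw``/``_parseRaw`` and the two ``__init__`` signatures): a ``TimeStamp`` round-trips through its 8-octet form, with the seconds truncated to the 60/2**32-second resolution of the encoding. A sketch, not part of this patch (the date/time values are arbitrary):

    from persistent.timestamp import TimeStamp

    ts = TimeStamp(2001, 2, 3, 4, 5, 6.07)
    raw = ts.raw()                     # 8 octets: two big-endian uint32s
    print(len(raw))                    # 8
    ts2 = TimeStamp(raw)               # reconstruct from the raw form
    print(ts2.year(), ts2.month(), ts2.day(), ts2.hour(), ts2.minute())
    # 2001 2 3 4 5
    print(abs(ts2.second() - 6.07) < 60.0 / (1 << 32))   # True: truncation only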
+TimeStampPy = TimeStampPy # noqa: F821 undefined name 'TimeStampPy' diff --git a/src/persistent/wref.py b/src/persistent/wref.py index 85517dc..e9357fc 100644 --- a/src/persistent/wref.py +++ b/src/persistent/wref.py @@ -19,6 +19,7 @@ WeakRefMarker = object() + class WeakRef: """Persistent weak references @@ -84,7 +85,7 @@ def __init__(self, adict=None, **kwargs): self.update(adict) # XXX 'kwargs' is pointless, because keys must be strings, but we # are going to try (and fail) to wrap a WeakRef around them. - if kwargs: # pragma: no cover + if kwargs: # pragma: no cover self.update(kwargs) def __getstate__(self): @@ -96,7 +97,7 @@ def __setstate__(self, state): state['data'] = { k: v for (k, v) in state['data'] if k() is not None - } + } Persistent.__setstate__(self, state) def __setitem__(self, key, value):
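The ``__setstate__`` above drops any entry whose key reference no longer resolves (``k() is None``). How a ``persistent.wref.WeakRef`` resolves — first through its volatile ``_v_ob``, then by looking its oid up in the data manager — is what the wref tests earlier in this patch assert. A standalone sketch with a toy dict-based jar standing in for a real connection (the ``Jar``, ``DB`` and ``Target`` names and the oid are invented):

    from persistent import Persistent
    from persistent.wref import WeakRef

    class DB:
        database_name = 'example'      # recorded by WeakRef when a jar is set

    class Jar(dict):
        """Toy oid -> object mapping plus the db() hook WeakRef consults."""
        def db(self):
            return DB()

    class Target(Persistent):
        pass

    jar = Jar()
    target = Target()
    target._p_oid = b'OID'
    target._p_jar = jar
    jar[target._p_oid] = target

    ref = WeakRef(target)
    print(ref() is target)        # True: served from the volatile _v_ob

    del ref._v_ob                 # simulate the target having been unloaded
    print(ref() is target)        # True again: re-resolved via jar[oid]

    del jar[target._p_oid]        # now the target is gone from the jar too
    del ref._v_ob
    print(ref())                  # None, mirroring a dead weakref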