
# pvec.py - probabilistic vector clocks for Mercurial
#
# Copyright 2012 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''
A "pvec" is a changeset property based on the theory of vector clocks
that can be compared to discover relatedness without consulting a
graph. This can be useful for tasks like determining how a
disconnected patch relates to a repository.

Currently a pvec consists of 448 bits, of which 24 are 'depth' and the
remainder are a bit vector. It is represented as a 70-character base85
string.

Construction:

- a root changeset has a depth of 0 and a bit vector based on its hash
- a normal (single-parent) changeset takes its parent's pvec, increases the
  depth by one and flips one bit of the vector based on its hash
- a merge changeset's pvec is constructed by copying differing bits from one
  parent's pvec into the other's to balance their depths

Properties:

- for linear (ancestor/descendant) changes, the hamming distance between the
  vectors is always <= the difference in depth
- when the hamming distance exceeds the depth difference, the changes are
  probably divergent
- when the hamming distance is < ~200, we can reliably detect when pvecs are
  near

Issues:

- hamming distance ceases to work over distances of ~ 200
- detecting divergence is less accurate when the common ancestor is very close
  to either revision or total distance is high
- this could probably be improved by modeling the relation between
  delta and hdist

Uses:

- a patch pvec can be used to locate the nearest available common ancestor for
  resolving conflicts
- ordering of patches can be established without a DAG
- two head pvecs can be compared to determine whether push/pull/merge is needed
  and approximately how many changesets are involved
- can be used to find a heuristic divergence measure between changesets on
  different branches
'''


from .node import nullrev
from . import (
    pycompat,
    util,
)

_size = 448  # 70 chars b85-encoded
_bytes = _size // 8
_depthbits = 24
_depthbytes = _depthbits // 8
_vecbytes = _bytes - _depthbytes
_vecbits = _vecbytes * 8
_radius = (_vecbits - 30) // 2  # vectors within this distance are very probably related
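

# Illustrative sketch, not used by the module: spell out the size arithmetic
# behind the constants above. The 70-character figure matches the module
# docstring; base85 encodes every 4 bytes as 5 characters, so 56 / 4 * 5 == 70.
def _demo_sizes():
    assert _bytes == 56  # 448 bits
    assert _depthbytes == 3  # 24-bit depth field
    assert _vecbytes == 53 and _vecbits == 424  # remaining bit vector
    assert len(util.b85encode(b'\0' * _bytes)) == 70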


def _bin(bs):
    '''convert a bytestring to an integer'''
    v = 0
    for b in bs:
        # iterating a bytes object yields ints on Python 3
        v = v * 256 + b
    return v


def _str(v, l):
    # type: (int, int) -> bytes
    bs = b""
    for p in range(l):
        bs = pycompat.bytechr(v & 255) + bs
        v >>= 8
    return bs


def _split(b):
    '''depth and bitvec'''
    return _bin(b[:_depthbytes]), _bin(b[_depthbytes:])


def _join(depth, bitvec):
    return _str(depth, _depthbytes) + _str(bitvec, _vecbytes)
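

# Illustrative sketch, not part of the module API: a round trip through the
# packing helpers above. A pvec stores a 3-byte depth in front of a 53-byte
# bit vector, and _split/_join are exact inverses for in-range values.
def _demo_pack():
    depth, vec = 5, (1 << 100) | 1
    packed = _join(depth, vec)
    assert len(packed) == _bytes
    assert _split(packed) == (depth, vec)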


def _hweight(x):
    c = 0
    while x:
        if x & 1:
            c += 1
        x >>= 1
    return c


_htab = [_hweight(x) for x in range(256)]


def _hamming(a, b):
    '''find the hamming distance between two longs'''
    d = a ^ b
    c = 0
    while d:
        c += _htab[d & 0xFF]
        d >>= 8
    return c
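

# Illustrative sketch: the table-driven popcount above yields the number of
# differing bit positions between two integers, e.g. 0b1010 and 0b0110
# differ in exactly two bits.
def _demo_hamming():
    assert _hamming(0b1010, 0b0110) == 2
    assert _hamming(0, 0) == 0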


def _mergevec(x, y, c):
    # Ideally, this function would be x ^ y ^ ancestor, but finding
    # ancestors is a nuisance. So instead we find the minimal number
    # of changes to balance the depth and hamming distance

    d1, v1 = x
    d2, v2 = y
    if d1 < d2:
        d1, d2, v1, v2 = d2, d1, v2, v1

    hdist = _hamming(v1, v2)
    ddist = d1 - d2
    v = v1
    m = v1 ^ v2  # mask of different bits
    i = 1

    if hdist > ddist:
        # if delta = 10 and hdist = 100, then we need to go up 55 steps
        # to the ancestor and down 45
        changes = (hdist - ddist + 1) // 2
    else:
        # must make at least one change
        changes = 1
    depth = d1 + changes

    # copy changes from v2
    if m:
        while changes:
            if m & i:
                v ^= i
                changes -= 1
            i <<= 1
    else:
        v = _flipbit(v, c)

    return depth, v
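

# Illustrative sketch of the balancing arithmetic in _mergevec, using the
# numbers from the comment above (depth gap 10, hamming distance 100): the
# implied ancestor is 55 flips from the deeper side and 45 from the
# shallower one, so 45 differing bits are copied across. The vectors and
# node value below are made up for the demo.
def _demo_mergevec():
    v1 = (1 << 100) - 1  # 100 bits set
    v2 = 0  # hamming distance of 100 to v1
    depth, v = _mergevec((65, v1), (55, v2), b'\0' * 20)
    assert depth == 65 + 45
    assert _hamming(v, v1) == 45 and _hamming(v, v2) == 55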


def _flipbit(v, node):
    # converting bit strings to longs is slow
    bit = (hash(node) & 0xFFFFFFFF) % _vecbits
    return v ^ (1 << bit)
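

# Illustrative sketch: _flipbit toggles a single bit of the vector chosen
# from the node's Python hash, so flipping twice with the same node restores
# the original vector. Note that hash() of a bytes object can differ between
# interpreter runs, so the chosen bit is only stable within one process.
def _demo_flipbit():
    node = b'\x12' * 20  # made-up 20-byte node for the demo
    once = _flipbit(0, node)
    assert _hweight(once) == 1
    assert _flipbit(once, node) == 0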


def ctxpvec(ctx):
    '''construct a pvec for ctx while filling in the cache'''
    r = ctx.repo()
    if not util.safehasattr(r, "_pveccache"):
        r._pveccache = {}
    pvc = r._pveccache
    if ctx.rev() not in pvc:
        cl = r.changelog
        for n in range(ctx.rev() + 1):
            if n not in pvc:
                node = cl.node(n)
                p1, p2 = cl.parentrevs(n)
                if p1 == nullrev:
                    # start with a 'random' vector at root
                    pvc[n] = (0, _bin((node * 3)[:_vecbytes]))
                elif p2 == nullrev:
                    d, v = pvc[p1]
                    pvc[n] = (d + 1, _flipbit(v, node))
                else:
                    pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
    bs = _join(*pvc[ctx.rev()])
    return pvec(util.b85encode(bs))


class pvec:
    def __init__(self, hashorctx):
        if isinstance(hashorctx, bytes):
            self._bs = hashorctx
            self._depth, self._vec = _split(util.b85decode(hashorctx))
        else:
            # hashorctx is a changectx: ctxpvec() returns a pvec instance,
            # so copy its decoded fields rather than storing the object
            other = ctxpvec(hashorctx)
            self._bs = other._bs
            self._depth = other._depth
            self._vec = other._vec

    def __str__(self):
        return pycompat.sysstr(self._bs)

    def __eq__(self, b):
        return self._vec == b._vec and self._depth == b._depth

    def __lt__(self, b):
        delta = b._depth - self._depth
        if delta < 0:
            return False  # self is deeper than b; this answer is exact, not probabilistic
        if _hamming(self._vec, b._vec) > delta:
            return False
        return True

    def __gt__(self, b):
        return b < self

    def __or__(self, b):
        delta = abs(b._depth - self._depth)
        if _hamming(self._vec, b._vec) <= delta:
            return False
        return True

    def __sub__(self, b):
        if self | b:
            raise ValueError(b"concurrent pvecs")
        return self._depth - b._depth

    def distance(self, b):
        d = abs(b._depth - self._depth)
        h = _hamming(self._vec, b._vec)
        return max(d, h)

    def near(self, b):
        dist = abs(b._depth - self._depth)
        if dist > _radius or _hamming(self._vec, b._vec) > _radius:
            return False
        return True
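

# Illustrative usage sketch: `repo` is assumed to be an already-opened
# localrepository object; nothing here is part of this module. Comparing two
# pvecs answers "is one revision probably an ancestor of the other?" without
# walking the graph, and `|` asks whether they have probably diverged.
def _demo_compare(repo):
    a = ctxpvec(repo[b'tip'])
    b = ctxpvec(repo[0])
    if a | b:
        return None  # probably divergent, no meaningful ordering
    # otherwise one is probably an ancestor of the other; the subtraction
    # approximates how many changesets lie between them
    return a - b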