comparison mercurial/utils/storageutil.py @ 43076:2372284d9457
formatting: blacken the codebase
This is using my patch to black
(https://github.com/psf/black/pull/826) so we don't un-wrap collection
literals.
Done with:
hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S
# skip-blame mass-reformatting only
# no-check-commit reformats foo_bar functions
Differential Revision: https://phab.mercurial-scm.org/D6971
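The hunks below are typical of what this `black -S` run produces (`-S` is black's `--skip-string-normalization` option, which is why the existing single-quoted `b'...'` literals are left untouched): hand-wrapped signatures and calls that exceed the line length are reflowed to one argument per line with a trailing comma, a second blank line is inserted between module-level definitions, and slices with compound bounds gain spaces around the colon. As a condensed sketch, here is the `emitrevisions` signature from the hunk further down (indentation is approximate, since the comparison view strips leading whitespace; `repository` is the module imported near the top of the file via `from ..interfaces import repository`):

    # Before (43075:57875cf423c9): arguments packed onto manually wrapped lines.
    def emitrevisions(store, nodes, nodesorder, resultcls, deltaparentfn=None,
                      candeltafn=None, rawsizefn=None, revdifffn=None, flagsfn=None,
                      deltamode=repository.CG_DELTAMODE_STD,
                      revisiondata=False, assumehaveparentrevisions=False):
        ...

    # After (43076:2372284d9457): the signature no longer fits on one line, so
    # black reflows it to one parameter per line with a trailing comma.
    def emitrevisions(
        store,
        nodes,
        nodesorder,
        resultcls,
        deltaparentfn=None,
        candeltafn=None,
        rawsizefn=None,
        revdifffn=None,
        flagsfn=None,
        deltamode=repository.CG_DELTAMODE_STD,
        revisiondata=False,
        assumehaveparentrevisions=False,
    ):
        ...

The extra blank lines at new lines 29, 56, 58, and so on come from black enforcing two blank lines between top-level definitions.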
author | Augie Fackler <augie@google.com>
date | Sun, 06 Oct 2019 09:45:02 -0400
parents | 268662aac075
children | 687b865b95ad
comparison
Lines that appear only in the left column were removed by the reformatting; lines that appear only in the right column were added.
43075:57875cf423c9 (before) | 43076:2372284d9457 (after) |
---|---|
24 pycompat, | 24 pycompat, |
25 ) | 25 ) |
26 from ..interfaces import repository | 26 from ..interfaces import repository |
27 | 27 |
28 _nullhash = hashlib.sha1(nullid) | 28 _nullhash = hashlib.sha1(nullid) |
| 29 |
29 | 30 |
30 def hashrevisionsha1(text, p1, p2): | 31 def hashrevisionsha1(text, p1, p2): |
31 """Compute the SHA-1 for revision data and its parents. | 32 """Compute the SHA-1 for revision data and its parents. |
32 | 33 |
33 This hash combines both the current file contents and its history | 34 This hash combines both the current file contents and its history |
50 s = hashlib.sha1(a) | 51 s = hashlib.sha1(a) |
51 s.update(b) | 52 s.update(b) |
52 s.update(text) | 53 s.update(text) |
53 return s.digest() | 54 return s.digest() |
54 | 55 |
| 56 |
55 METADATA_RE = re.compile(b'\x01\n') | 57 METADATA_RE = re.compile(b'\x01\n') |
| 58 |
56 | 59 |
57 def parsemeta(text): | 60 def parsemeta(text): |
58 """Parse metadata header from revision data. | 61 """Parse metadata header from revision data. |
59 | 62 |
60 Returns a 2-tuple of (metadata, offset), where both can be None if there | 63 Returns a 2-tuple of (metadata, offset), where both can be None if there |
69 for l in mtext.splitlines(): | 72 for l in mtext.splitlines(): |
70 k, v = l.split(b': ', 1) | 73 k, v = l.split(b': ', 1) |
71 meta[k] = v | 74 meta[k] = v |
72 return meta, s + 2 | 75 return meta, s + 2 |
73 | 76 |
| 77 |
74 def packmeta(meta, text): | 78 def packmeta(meta, text): |
75 """Add metadata to fulltext to produce revision text.""" | 79 """Add metadata to fulltext to produce revision text.""" |
76 keys = sorted(meta) | 80 keys = sorted(meta) |
77 metatext = b''.join(b'%s: %s\n' % (k, meta[k]) for k in keys) | 81 metatext = b''.join(b'%s: %s\n' % (k, meta[k]) for k in keys) |
78 return b'\x01\n%s\x01\n%s' % (metatext, text) | 82 return b'\x01\n%s\x01\n%s' % (metatext, text) |
79 | 83 |
| 84 |
80 def iscensoredtext(text): | 85 def iscensoredtext(text): |
81 meta = parsemeta(text)[0] | 86 meta = parsemeta(text)[0] |
82 return meta and b'censored' in meta | 87 return meta and b'censored' in meta |
83 | 88 |
| 89 |
84 def filtermetadata(text): | 90 def filtermetadata(text): |
85 """Extract just the revision data from source text. | 91 """Extract just the revision data from source text. |
86 | 92 |
87 Returns ``text`` unless it has a metadata header, in which case we return | 93 Returns ``text`` unless it has a metadata header, in which case we return |
88 a new buffer without hte metadata. | 94 a new buffer without hte metadata. |
89 """ | 95 """ |
90 if not text.startswith(b'\x01\n'): | 96 if not text.startswith(b'\x01\n'): |
91 return text | 97 return text |
92 | 98 |
93 offset = text.index(b'\x01\n', 2) | 99 offset = text.index(b'\x01\n', 2) |
94 return text[offset + 2:] | 100 return text[offset + 2 :] |
| 101 |
95 | 102 |
96 def filerevisioncopied(store, node): | 103 def filerevisioncopied(store, node): |
97 """Resolve file revision copy metadata. | 104 """Resolve file revision copy metadata. |
98 | 105 |
99 Returns ``False`` if the file has no copy metadata. Otherwise a | 106 Returns ``False`` if the file has no copy metadata. Otherwise a |
110 if meta and b'copy' in meta and b'copyrev' in meta: | 117 if meta and b'copy' in meta and b'copyrev' in meta: |
111 return meta[b'copy'], bin(meta[b'copyrev']) | 118 return meta[b'copy'], bin(meta[b'copyrev']) |
112 | 119 |
113 return False | 120 return False |
114 | 121 |
| 122 |
115 def filedataequivalent(store, node, filedata): | 123 def filedataequivalent(store, node, filedata): |
116 """Determines whether file data is equivalent to a stored node. | 124 """Determines whether file data is equivalent to a stored node. |
117 | 125 |
118 Returns True if the passed file data would hash to the same value | 126 Returns True if the passed file data would hash to the same value |
119 as a stored revision and False otherwise. | 127 as a stored revision and False otherwise. |
145 # remains unchanged. Check if that's the case. | 153 # remains unchanged. Check if that's the case. |
146 if store.renamed(node): | 154 if store.renamed(node): |
147 return store.read(node) == filedata | 155 return store.read(node) == filedata |
148 | 156 |
149 return False | 157 return False |
| 158 |
150 | 159 |
151 def iterrevs(storelen, start=0, stop=None): | 160 def iterrevs(storelen, start=0, stop=None): |
152 """Iterate over revision numbers in a store.""" | 161 """Iterate over revision numbers in a store.""" |
153 step = 1 | 162 step = 1 |
154 | 163 |
161 else: | 170 else: |
162 stop = storelen | 171 stop = storelen |
163 | 172 |
164 return pycompat.xrange(start, stop, step) | 173 return pycompat.xrange(start, stop, step) |
165 | 174 |
| 175 |
166 def fileidlookup(store, fileid, identifier): | 176 def fileidlookup(store, fileid, identifier): |
167 """Resolve the file node for a value. | 177 """Resolve the file node for a value. |
168 | 178 |
169 ``store`` is an object implementing the ``ifileindex`` interface. | 179 ``store`` is an object implementing the ``ifileindex`` interface. |
170 | 180 |
182 """ | 192 """ |
183 if isinstance(fileid, int): | 193 if isinstance(fileid, int): |
184 try: | 194 try: |
185 return store.node(fileid) | 195 return store.node(fileid) |
186 except IndexError: | 196 except IndexError: |
187 raise error.LookupError('%d' % fileid, identifier, | 197 raise error.LookupError( |
188 _('no match found')) | 198 '%d' % fileid, identifier, _('no match found') |
| 199 ) |
189 | 200 |
190 if len(fileid) == 20: | 201 if len(fileid) == 20: |
191 try: | 202 try: |
192 store.rev(fileid) | 203 store.rev(fileid) |
193 return fileid | 204 return fileid |
214 pass | 225 pass |
215 except (ValueError, OverflowError): | 226 except (ValueError, OverflowError): |
216 pass | 227 pass |
217 | 228 |
218 raise error.LookupError(fileid, identifier, _('no match found')) | 229 raise error.LookupError(fileid, identifier, _('no match found')) |
| 230 |
219 | 231 |
220 def resolvestripinfo(minlinkrev, tiprev, headrevs, linkrevfn, parentrevsfn): | 232 def resolvestripinfo(minlinkrev, tiprev, headrevs, linkrevfn, parentrevsfn): |
221 """Resolve information needed to strip revisions. | 233 """Resolve information needed to strip revisions. |
222 | 234 |
223 Finds the minimum revision number that must be stripped in order to | 235 Finds the minimum revision number that must be stripped in order to |
266 if plinkrev >= minlinkrev: | 278 if plinkrev >= minlinkrev: |
267 futurelargelinkrevs.add(plinkrev) | 279 futurelargelinkrevs.add(plinkrev) |
268 | 280 |
269 return strippoint, brokenrevs | 281 return strippoint, brokenrevs |
270 | 282 |
271 def emitrevisions(store, nodes, nodesorder, resultcls, deltaparentfn=None, | 283 |
272 candeltafn=None, rawsizefn=None, revdifffn=None, flagsfn=None, | 284 def emitrevisions( |
273 deltamode=repository.CG_DELTAMODE_STD, | 285 store, |
274 revisiondata=False, assumehaveparentrevisions=False): | 286 nodes, |
| 287 nodesorder, |
| 288 resultcls, |
| 289 deltaparentfn=None, |
| 290 candeltafn=None, |
| 291 rawsizefn=None, |
| 292 revdifffn=None, |
| 293 flagsfn=None, |
| 294 deltamode=repository.CG_DELTAMODE_STD, |
| 295 revisiondata=False, |
| 296 assumehaveparentrevisions=False, |
| 297 ): |
275 """Generic implementation of ifiledata.emitrevisions(). | 298 """Generic implementation of ifiledata.emitrevisions(). |
276 | 299 |
277 Emitting revision data is subtly complex. This function attempts to | 300 Emitting revision data is subtly complex. This function attempts to |
278 encapsulate all the logic for doing so in a backend-agnostic way. | 301 encapsulate all the logic for doing so in a backend-agnostic way. |
279 | 302 |
341 if nodesorder == 'nodes': | 364 if nodesorder == 'nodes': |
342 revs = [frev(n) for n in nodes] | 365 revs = [frev(n) for n in nodes] |
343 elif nodesorder == 'linear': | 366 elif nodesorder == 'linear': |
344 revs = set(frev(n) for n in nodes) | 367 revs = set(frev(n) for n in nodes) |
345 revs = dagop.linearize(revs, store.parentrevs) | 368 revs = dagop.linearize(revs, store.parentrevs) |
346 else: # storage and default | 369 else: # storage and default |
347 revs = sorted(frev(n) for n in nodes) | 370 revs = sorted(frev(n) for n in nodes) |
348 | 371 |
349 prevrev = None | 372 prevrev = None |
350 | 373 |
351 if deltamode == repository.CG_DELTAMODE_PREV or assumehaveparentrevisions: | 374 if deltamode == repository.CG_DELTAMODE_PREV or assumehaveparentrevisions: |
386 if deltaparentrev in available: | 409 if deltaparentrev in available: |
387 baserev = deltaparentrev | 410 baserev = deltaparentrev |
388 | 411 |
389 # Base revision is a parent that hasn't been emitted already. | 412 # Base revision is a parent that hasn't been emitted already. |
390 # Use it if we can assume the receiver has the parent revision. | 413 # Use it if we can assume the receiver has the parent revision. |
391 elif (assumehaveparentrevisions | 414 elif assumehaveparentrevisions and deltaparentrev in (p1rev, p2rev): |
392 and deltaparentrev in (p1rev, p2rev)): | |
393 baserev = deltaparentrev | 415 baserev = deltaparentrev |
394 | 416 |
395 # No guarantee the receiver has the delta parent. Send delta | 417 # No guarantee the receiver has the delta parent. Send delta |
396 # against last revision (if possible), which in the common case | 418 # against last revision (if possible), which in the common case |
397 # should be similar enough to this revision that the delta is | 419 # should be similar enough to this revision that the delta is |
430 if rawsizefn: | 452 if rawsizefn: |
431 baserevisionsize = rawsizefn(baserev) | 453 baserevisionsize = rawsizefn(baserev) |
432 else: | 454 else: |
433 baserevisionsize = len(store.rawdata(baserev)) | 455 baserevisionsize = len(store.rawdata(baserev)) |
434 | 456 |
435 elif (baserev == nullrev | 457 elif ( |
436 and deltamode != repository.CG_DELTAMODE_PREV): | 458 baserev == nullrev and deltamode != repository.CG_DELTAMODE_PREV |
| 459 ): |
437 revision = store.rawdata(node) | 460 revision = store.rawdata(node) |
438 available.add(rev) | 461 available.add(rev) |
439 else: | 462 else: |
440 if revdifffn: | 463 if revdifffn: |
441 delta = revdifffn(baserev, rev) | 464 delta = revdifffn(baserev, rev) |
442 else: | 465 else: |
443 delta = mdiff.textdiff(store.rawdata(baserev), | 466 delta = mdiff.textdiff( |
444 store.rawdata(rev)) | 467 store.rawdata(baserev), store.rawdata(rev) |
| 468 ) |
445 | 469 |
446 available.add(rev) | 470 available.add(rev) |
447 | 471 |
448 yield resultcls( | 472 yield resultcls( |
449 node=node, | 473 node=node, |
451 p2node=fnode(p2rev), | 475 p2node=fnode(p2rev), |
452 basenode=fnode(baserev), | 476 basenode=fnode(baserev), |
453 flags=flagsfn(rev) if flagsfn else 0, | 477 flags=flagsfn(rev) if flagsfn else 0, |
454 baserevisionsize=baserevisionsize, | 478 baserevisionsize=baserevisionsize, |
455 revision=revision, | 479 revision=revision, |
456 delta=delta) | 480 delta=delta, |
| 481 ) |
457 | 482 |
458 prevrev = rev | 483 prevrev = rev |
| 484 |
459 | 485 |
460 def deltaiscensored(delta, baserev, baselenfn): | 486 def deltaiscensored(delta, baserev, baselenfn): |
461 """Determine if a delta represents censored revision data. | 487 """Determine if a delta represents censored revision data. |
462 | 488 |
463 ``baserev`` is the base revision this delta is encoded against. | 489 ``baserev`` is the base revision this delta is encoded against. |
481 if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen): | 507 if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen): |
482 return False | 508 return False |
483 | 509 |
484 add = "\1\ncensored:" | 510 add = "\1\ncensored:" |
485 addlen = len(add) | 511 addlen = len(add) |
486 return newlen >= addlen and delta[hlen:hlen + addlen] == add | 512 return newlen >= addlen and delta[hlen : hlen + addlen] == add |
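The final hunk shows black's slice-spacing rule: `delta[hlen:hlen + addlen]` becomes `delta[hlen : hlen + addlen]` because one of the bounds is a compound expression, so the colon is padded on both sides like a low-priority operator, while simple bounds such as `delta[:hlen]` a few lines earlier keep the compact form. A minimal runnable sketch of the same rule (not part of the changeset):

    # Minimal sketch, not from the changeset: the spacing black applies to slices.
    data = b'\x01\ncensored:reason\x01\nrest of the revision text'
    hlen = 2
    add = b'\x01\ncensored:'
    addlen = len(add)
    print(data[:hlen])                 # simple bound: the colon stays unpadded
    print(data[hlen : hlen + addlen])  # compound bound: the colon is padded on both sides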