annotate mercurial/revlog.py @ 86:1b945e8ba67b

"Friendlier exceptions for unknown node errors"

author    mpm@selenic.com
date      Tue, 17 May 2005 12:20:29 -0800
parents   b2e3528115da
children  7daef883134f

# revlog.py - storage back-end for mercurial
#
# This provides efficient delta storage with O(1) retrieve and append
# and O(changes) merge between branches
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import zlib, struct, sha, os, tempfile, binascii
from mercurial import mdiff

def hex(node): return binascii.hexlify(node)
def bin(node): return binascii.unhexlify(node)
def short(node): return hex(node[:4])

def compress(text):
    return zlib.compress(text)

def decompress(bin):
    return zlib.decompress(bin)

def hash(text, p1, p2):
    l = [p1, p2]
    l.sort()
    return sha.sha(l[0] + l[1] + text).digest()

nullid = "\0" * 20
indexformat = ">4l20s20s20s"

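The record layout is fixed by indexformat: four big-endian 32-bit integers followed by three 20-byte strings, which the comment inside lazyparser below names offset, size, base, linkrev, p1, p2, nodeid. A self-contained sketch of packing and unpacking one such record (the field values here are invented for illustration):

import struct

indexformat = ">4l20s20s20s"        # same format string as above
nullid = "\0" * 20

# offset, size, base, linkrev, p1, p2, nodeid (invented example values)
entry = (0, 17, 0, 0, nullid, nullid, "\x12" * 20)
packed = struct.pack(indexformat, *entry)

print(struct.calcsize(indexformat))                  # 76 bytes per record
print(struct.unpack(indexformat, packed) == entry)   # True

Every index entry is therefore a fixed 76 bytes, which is what lets the lazy classes below slice out entry i at byte offset i * 76 without parsing the whole file.
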
class lazyparser:
    def __init__(self, data):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}

        if 0:
            n = 0
            i = self.data
            s = struct.calcsize(indexformat)
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                self.map[e[6]] = n
                self.index.append(e)
                n += 1

    def load(self, pos):
        block = pos / 1000
        i = block * 1000
        end = min(self.l, i + 1000)
        while i < end:
            d = self.data[i * self.s: (i + 1) * self.s]
            e = struct.unpack(indexformat, d)
            self.index[i] = e
            self.map[e[6]] = i
            i += 1

class lazyindex:
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def __getitem__(self, pos):
        i = self.p.index[pos]
        if not i:
            self.p.load(pos)
            return self.p.index[pos]
        return i
    def append(self, e):
        self.p.index.append(e)

class lazymap:
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        n = self.p.data.find(key)
        if n < 0: raise KeyError("node " + hex(key))
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        try:
            self[key]
            return True
        except KeyError:
            return False
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val

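lazyparser keeps the raw index bytes and fills in self.index and self.map 1000 entries at a time; lazyindex and lazymap are thin views that call lazyparser.load only when a position or node has not been parsed yet. A small round-trip sketch with two synthetic entries, assuming this module is importable as mercurial.revlog (as in a source tree of this vintage):

import struct
from mercurial.revlog import lazyparser, lazyindex, lazymap, indexformat, nullid

n1, n2 = "\x01" * 20, "\x02" * 20
raw = (struct.pack(indexformat, 0, 10, 0, 0, nullid, nullid, n1) +
       struct.pack(indexformat, 10, 5, 0, 1, n1, nullid, n2))

p = lazyparser(raw)
index, nodemap = lazyindex(p), lazymap(p)

print(len(index))     # 2, known from the data length alone
print(nodemap[n2])    # 1, located via data.find() and a block load
print(index[1][1])    # 5, the size field of the second entry
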
class revlog:
    def __init__(self, opener, indexfile, datafile):
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener
        self.cache = None
        # read the whole index for now, handle on-demand later
        try:
            i = self.opener(self.indexfile).read()
        except IOError:
            i = ""
        parser = lazyparser(i)
        self.index = lazyindex(parser)
        self.nodemap = lazymap(parser)

    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node): return self.nodemap[node]
    def linkrev(self, node): return self.index[self.nodemap[node]][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.nodemap[node]][4:6]

    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    def base(self, rev): return self.index[rev][2]

    def lookup(self, id):
        try:
            rev = int(id)
            return self.node(rev)
        except ValueError:
            c = []
            for n in self.nodemap:
                if id in hex(n):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError("No match found")
            return c[0]

        return None

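lookup accepts either a decimal revision number or any unambiguous substring of a node's hex form. A standalone sketch of the same resolution order over a plain list of hex ids (taken from the changeset hashes on this page), kept outside the class so it runs without a repository:

def lookup_id(id, hexnodes):
    # mirrors revlog.lookup: try an integer rev first, then substring match
    try:
        return int(id)
    except ValueError:
        c = [h for h in hexnodes if id in h]
        if len(c) > 1: raise KeyError("Ambiguous identifier")
        if len(c) < 1: raise KeyError("No match found")
        return c[0]

hexnodes = ["9117c6561b0b", "b2e3528115da", "1b945e8ba67b"]
print(lookup_id("2", hexnodes))      # 2, treated as a revision number
print(lookup_id("1b94", hexnodes))   # '1b945e8ba67b'
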
    def revisions(self, list):
        # this can be optimized to do spans, etc
        # be stupid for now
        for node in list:
            yield self.revision(node)

    def diff(self, a, b):
        return mdiff.textdiff(a, b)

    def patches(self, t, pl):
        return mdiff.patches(t, pl)

    def revision(self, node):
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        text = None
        rev = self.rev(node)
        base = self.base(rev)
        start = self.start(base)
        end = self.end(rev)

        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
        last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if not text:
            last = self.length(base)
            text = decompress(data[:last])

        bins = []
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            bins.append(decompress(data[last:last + s]))
            last = last + s

        text = mdiff.patches(text, bins)

        (p1, p2) = self.parents(node)
        if node != hash(text, p1, p2):
            raise "integrity check failed on %s:%d" % (self.datafile, rev)

        self.cache = (node, rev, text)
        return text

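revision() decompresses the full text stored at base(rev), replays every delta up to rev, and then verifies that the result still hashes to the node id; hash() at the top of the file digests the two parents in sorted order followed by the text. A small check of that property, again assuming the module is importable as mercurial.revlog:

from mercurial import revlog

p1 = "\x01" * 20
text = "hello\n"

# parent order does not matter: hash() sorts the parents before digesting
print(revlog.hash(text, p1, revlog.nullid) ==
      revlog.hash(text, revlog.nullid, p1))        # True
print(len(revlog.hash(text, p1, revlog.nullid)))   # 20, a binary sha1 digest
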
    def addrevision(self, text, transaction, link, p1=None, p2=None):
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            prev = self.revision(self.tip())
            data = compress(self.diff(prev, text))
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node

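addrevision normally appends a compressed delta against the current tip, but it falls back to a full version once the delta chain since the last full version, plus the new delta, would exceed twice the size of the new text; that is the dist > len(text) * 2 test above, and it bounds how much work revision() has to do. A toy restatement of the cutoff with invented sizes:

def stores_full_version(chain_bytes, new_delta_bytes, text_len):
    # same test as "dist > len(text) * 2" above, with dist spelled out
    dist = chain_bytes + new_delta_bytes
    return dist > text_len * 2

print(stores_full_version(1500, 100, 1000))   # False: keep appending deltas
print(stores_full_version(2000, 150, 1000))   # True: 2150 > 2000, store a snapshot
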
    def ancestor(self, a, b):
        def expand(list, map):
            a = []
            while list:
                n = list.pop(0)
                map[n] = 1
                yield n
                for p in self.parents(n):
                    if p != nullid and p not in map:
                        list.append(p)
            yield nullid

        amap = {}
        bmap = {}
        ag = expand([a], amap)
        bg = expand([b], bmap)
        adone = bdone = 0

        while not adone or not bdone:
            if not adone:
                an = ag.next()
                if an == nullid:
                    adone = 1
                elif an in bmap:
                    return an
            if not bdone:
                bn = bg.next()
                if bn == nullid:
                    bdone = 1
                elif bn in amap:
                    return bn

        return nullid

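ancestor() runs two breadth-first walks, one from each node, strictly interleaved, and returns the first node either walk reaches that the other walk has already marked. A standalone sketch of the same idea over a plain parents dict (the node names and the tiny DAG are invented; None stands in for nullid):

def ancestor(a, b, parents):
    def expand(start, seen):
        queue = [start]
        while queue:
            n = queue.pop(0)
            seen[n] = 1
            yield n
            for p in parents.get(n, []):
                if p is not None and p not in seen:
                    queue.append(p)
        yield None

    amap, bmap = {}, {}
    ag, bg = expand(a, amap), expand(b, bmap)
    adone = bdone = 0
    while not adone or not bdone:
        if not adone:
            an = next(ag)
            if an is None: adone = 1
            elif an in bmap: return an
        if not bdone:
            bn = next(bg)
            if bn is None: bdone = 1
            elif bn in amap: return bn
    return None

parents = {"x": ["m"], "m": ["r"], "y": ["r"], "r": []}
print(ancestor("x", "y", parents))   # 'r', the common ancestor
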
    def mergedag(self, other, transaction, linkseq, accumulate = None):
        """combine the nodes from other's DAG into ours"""
        old = self.tip()
        i = self.count()
        l = []

        # merge the other revision log into our DAG
        for r in range(other.count()):
            id = other.node(r)
            if id not in self.nodemap:
                (xn, yn) = other.parents(id)
                l.append((id, xn, yn))
                self.nodemap[id] = i
                i += 1

        # merge node data for new nodes
        r = other.revisions([e[0] for e in l])
        for e in l:
            t = r.next()
            if accumulate: accumulate(t)
            self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

        # return the unmerged heads for later resolving
        return (old, self.tip())

    def group(self, linkmap):
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes. the first delta is
        # parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
        # have this parent as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
            return mdiff.patches(text, bins)

        # build deltas
        deltas = []
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = decompress(chunks[self.base(b)])
                    tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)

        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)

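Each chunk that group() emits is a 4-byte big-endian length, then node, p1, p2 and the linked changeset node at 20 bytes apiece, then the delta itself; one more 4-byte word with the total length is prepended to the whole payload, and addgroup() below unpacks exactly this with ">l20s20s20s20s". A self-contained sketch that builds one chunk and reads it back (node values and delta bytes are invented; the reader assumes the caller has already consumed the leading total-length word, which matches addgroup finding the first parent at data[24:44]):

import struct

node, p1, p2, cs = "\x0a" * 20, "\0" * 20, "\0" * 20, "\x0b" * 20
delta = "fake delta bytes"

# writer side, as in group(): per-chunk length prefix, then a total prefix
meta = node + p1 + p2 + cs
chunk = struct.pack(">l", len(meta) + len(delta) + 4) + meta + delta
payload = struct.pack(">l", len(chunk) + 4) + chunk

# reader side, as in addgroup(), over the chunk stream without the total word
data = payload[4:]
pos = 0
l, rnode, rp1, rp2, rcs = struct.unpack(">l20s20s20s20s", data[pos:pos + 84])
rdelta = data[pos + 84:pos + l]

print(rnode == node and rp1 == p1 and rdelta == delta)   # True
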
    def addgroup(self, data, linkmapper, transaction):
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log, the rest are against the previous delta.

        if not data: return self.tip()

        # retrieve the parent revision of the delta chain
        chain = data[24:44]
        if not chain in self.nodemap:
            raise "unknown base %s" % short(chain[:4])

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 0
        while pos < len(data):
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos+84])
            if node in self.nodemap:
                raise "already have %s" % hex(node[:4])
            link = linkmapper(cs)
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
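
For context: the only hooks revlog needs from its caller are an opener callable, used as opener(name) for reads and opener(name, "a") for appends, and a transaction object with an add(file, offset) method. A minimal sketch of those shapes; the directory path and the no-op transaction are assumptions for illustration only, since Mercurial's real opener and transaction also handle journalling and rollback:

import os

def opener(base):
    # opener(name) opens for read, opener(name, "a") appends, as used above
    def o(name, mode="r"):
        return open(os.path.join(base, name), mode)
    return o

class noptransaction:
    # stand-in: addrevision() and addgroup() only ever call transaction.add()
    def add(self, file, offset):
        pass

# hypothetical usage: rl = revlog(opener("/tmp/store"), "test.i", "test.d")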