annotate mercurial/revlog.py @ 94:7daef883134f
Refactor merge code
Delete old code
Fix calculation of newer nodes on server
Fix branch recursion on client
Fix manifest merge problems
Add more debugging and note messages to merge
author      mpm@selenic.com
date        Wed, 18 May 2005 16:29:39 -0800
parents     1b945e8ba67b
children    7a2abee6b0c2
# revlog.py - storage back-end for mercurial
#
# This provides efficient delta storage with O(1) retrieve and append
# and O(changes) merge between branches
#
# Copyright 2005 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

import zlib, struct, sha, os, tempfile, binascii
from mercurial import mdiff

def hex(node): return binascii.hexlify(node)
def bin(node): return binascii.unhexlify(node)
def short(node): return hex(node[:4])

def compress(text):
    return zlib.compress(text)

def decompress(bin):
    return zlib.decompress(bin)

def hash(text, p1, p2):
    l = [p1, p2]
    l.sort()
    return sha.sha(l[0] + l[1] + text).digest()

nullid = "\0" * 20
indexformat = ">4l20s20s20s"

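A short illustrative sketch (not part of revlog.py): hash() sorts the two parents so a nodeid does not depend on parent order, and indexformat packs one fixed 76-byte index record per revision, with offset, length, base and linkrev as big-endian 32-bit integers followed by p1, p2 and the nodeid as raw 20-byte strings.

    node = hash("hello\n", nullid, nullid)
    entry = (0, 11, 0, 0, nullid, nullid, node)  # offset, length, base, linkrev, p1, p2, nodeid
    packed = struct.pack(indexformat, *entry)
    assert len(packed) == struct.calcsize(indexformat)  # 76 bytes per record
    assert struct.unpack(indexformat, packed)[6] == node
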
class lazyparser:
    def __init__(self, data):
        self.data = data
        self.s = struct.calcsize(indexformat)
        self.l = len(data)/self.s
        self.index = [None] * self.l
        self.map = {nullid: -1}

        if 0:
            n = 0
            i = self.data
            s = struct.calcsize(indexformat)
            for f in xrange(0, len(i), s):
                # offset, size, base, linkrev, p1, p2, nodeid
                e = struct.unpack(indexformat, i[f:f + s])
                self.map[e[6]] = n
                self.index.append(e)
                n += 1

    def load(self, pos):
        block = pos / 1000
        i = block * 1000
        end = min(self.l, i + 1000)
        while i < end:
            d = self.data[i * self.s: (i + 1) * self.s]
            e = struct.unpack(indexformat, d)
            self.index[i] = e
            self.map[e[6]] = i
            i += 1

class lazyindex:
    def __init__(self, parser):
        self.p = parser
    def __len__(self):
        return len(self.p.index)
    def __getitem__(self, pos):
        i = self.p.index[pos]
        if not i:
            self.p.load(pos)
            return self.p.index[pos]
        return i
    def append(self, e):
        self.p.index.append(e)

class lazymap:
    def __init__(self, parser):
        self.p = parser
    def load(self, key):
        n = self.p.data.find(key)
        if n < 0: raise KeyError("node " + hex(key))
        pos = n / self.p.s
        self.p.load(pos)
    def __contains__(self, key):
        try:
            self[key]
            return True
        except KeyError:
            return False
    def __getitem__(self, key):
        try:
            return self.p.map[key]
        except KeyError:
            try:
                self.load(key)
                return self.p.map[key]
            except KeyError:
                raise KeyError("node " + hex(key))
    def __setitem__(self, key, val):
        self.p.map[key] = val

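A rough usage sketch of the three lazy classes (illustrative only, assuming `data` holds the raw bytes of a non-empty index file): lazyparser defers struct-unpacking, lazyindex fills 1000-entry blocks on first access through lazyparser.load(), and lazymap answers node-to-rev lookups from the already-parsed map or by scanning the raw data for the 20-byte node.

    parser = lazyparser(data)
    index = lazyindex(parser)    # list-like: index[rev] -> (offset, length, base, linkrev, p1, p2, node)
    nodemap = lazymap(parser)    # dict-like: nodemap[node] -> rev

    tiprev = len(index) - 1
    offset, length, base, linkrev, p1, p2, node = index[tiprev]  # parses that block on demand
    assert nodemap[node] == tiprev
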
class revlog:
    def __init__(self, opener, indexfile, datafile):
        self.indexfile = indexfile
        self.datafile = datafile
        self.opener = opener
        self.cache = None
        # read the whole index for now, handle on-demand later
        try:
            i = self.opener(self.indexfile).read()
        except IOError:
            i = ""
        parser = lazyparser(i)
        self.index = lazyindex(parser)
        self.nodemap = lazymap(parser)

    def tip(self): return self.node(len(self.index) - 1)
    def count(self): return len(self.index)
    def node(self, rev): return (rev < 0) and nullid or self.index[rev][6]
    def rev(self, node): return self.nodemap[node]
    def linkrev(self, node): return self.index[self.nodemap[node]][3]
    def parents(self, node):
        if node == nullid: return (nullid, nullid)
        return self.index[self.nodemap[node]][4:6]

    def start(self, rev): return self.index[rev][0]
    def length(self, rev): return self.index[rev][1]
    def end(self, rev): return self.start(rev) + self.length(rev)
    def base(self, rev): return self.index[rev][2]

    def lookup(self, id):
        try:
            rev = int(id)
            return self.node(rev)
        except ValueError:
            c = []
            for n in self.nodemap:
                if id in hex(n):
                    c.append(n)
            if len(c) > 1: raise KeyError("Ambiguous identifier")
            if len(c) < 1: raise KeyError("No match found")
            return c[0]

        return None

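A hedged usage sketch of lookup() (illustrative; `r` stands for some revlog instance with at least one revision): it accepts either a decimal revision number or any substring of a hex nodeid, and raises KeyError for ambiguous or unknown identifiers.

    n1 = r.lookup("0")           # by revision number
    n2 = r.lookup(hex(n1)[:8])   # by hex prefix, as long as it matches only one node
    assert n1 == n2
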
    def diff(self, a, b):
        return mdiff.textdiff(a, b)

    def patches(self, t, pl):
        return mdiff.patches(t, pl)

    def revision(self, node):
        if node == nullid: return ""
        if self.cache and self.cache[0] == node: return self.cache[2]

        text = None
        rev = self.rev(node)
        base = self.base(rev)
        start = self.start(base)
        end = self.end(rev)

        if self.cache and self.cache[1] >= base and self.cache[1] < rev:
            base = self.cache[1]
            start = self.start(base + 1)
            text = self.cache[2]
            last = 0

        f = self.opener(self.datafile)
        f.seek(start)
        data = f.read(end - start)

        if not text:
            last = self.length(base)
            text = decompress(data[:last])

        bins = []
        for r in xrange(base + 1, rev + 1):
            s = self.length(r)
            bins.append(decompress(data[last:last + s]))
            last = last + s

        text = mdiff.patches(text, bins)

        (p1, p2) = self.parents(node)
        if node != hash(text, p1, p2):
            raise "integrity check failed on %s:%d" % (self.datafile, rev)

        self.cache = (node, rev, text)
        return text

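revision() walks a delta chain: it decompresses the full text stored at base(rev), applies every delta from base + 1 through rev with mdiff.patches, and only then checks the result against the stored nodeid; the cached (node, rev, text) triple lets a later call resume from an intermediate revision instead of the chain base. A small sketch of the identity the final integrity check enforces (illustrative; `r` is some revlog instance with at least one revision):

    node = r.tip()
    text = r.revision(node)
    p1, p2 = r.parents(node)
    assert node == hash(text, p1, p2)   # contents plus sorted parents determine the nodeid
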
    def addrevision(self, text, transaction, link, p1=None, p2=None):
        if text is None: text = ""
        if p1 is None: p1 = self.tip()
        if p2 is None: p2 = nullid

        node = hash(text, p1, p2)

        n = self.count()
        t = n - 1

        if n:
            base = self.base(t)
            start = self.start(base)
            end = self.end(t)
            prev = self.revision(self.tip())
            data = compress(self.diff(prev, text))
            dist = end - start + len(data)

        # full versions are inserted when the needed deltas
        # become comparable to the uncompressed text
        if not n or dist > len(text) * 2:
            data = compress(text)
            base = n
        else:
            base = self.base(t)

        offset = 0
        if t >= 0:
            offset = self.end(t)

        e = (offset, len(data), base, link, p1, p2, node)

        self.index.append(e)
        self.nodemap[node] = n
        entry = struct.pack(indexformat, *e)

        transaction.add(self.datafile, e[0])
        self.opener(self.datafile, "a").write(data)
        transaction.add(self.indexfile, n * len(entry))
        self.opener(self.indexfile, "a").write(entry)

        self.cache = (node, n, text)
        return node

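addrevision() normally appends only a compressed delta against the current tip, but once the span from the chain base through the new delta (dist) grows past twice the size of the uncompressed text it stores a compressed full version and starts a new chain, which keeps reconstruction cost bounded. The decision reduces to this predicate (illustrative restatement, not a separate function in the file):

    def wants_full_version(n, dist, textlen):
        # n == 0: empty log, the first revision is always stored in full
        # dist: bytes from start(base) through the end of the new compressed delta
        return not n or dist > textlen * 2
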
    def ancestor(self, a, b):
        def expand(list, map):
            a = []
            while list:
                n = list.pop(0)
                map[n] = 1
                yield n
                for p in self.parents(n):
                    if p != nullid and p not in map:
                        list.append(p)
            yield nullid

        amap = {}
        bmap = {}
        ag = expand([a], amap)
        bg = expand([b], bmap)
        adone = bdone = 0

        while not adone or not bdone:
            if not adone:
                an = ag.next()
                if an == nullid:
                    adone = 1
                elif an in bmap:
                    return an
            if not bdone:
                bn = bg.next()
                if bn == nullid:
                    bdone = 1
                elif bn in amap:
                    return bn

        return nullid

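ancestor() expands the ancestor sets of both arguments breadth-first in lockstep, recording visited nodes in amap and bmap, and returns the first node one walk yields that the other has already seen; nullid comes back only if both walks exhaust without meeting. A brief usage sketch (illustrative; `r` and the chosen revisions are hypothetical):

    a = r.lookup("5")
    b = r.lookup("7")
    common = r.ancestor(a, b)   # nullid if the two revisions share no ancestor
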
    def group(self, linkmap):
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes. the first delta is
        # parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
        # have this parent as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            bins = [decompress(chunks[r]) for r in xrange(base + 1, rev + 1)]
            return mdiff.patches(text, bins)

        # build deltas
        deltas = []
        for d in xrange(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
                else:
                    tb = decompress(chunks[self.base(b)])
                    tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)

        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)

    def addgroup(self, data, linkmapper, transaction):
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log, the rest are against the previous delta.

        if not data: return self.tip()

        # retrieve the parent revision of the delta chain
        chain = data[24:44]
        if not chain in self.nodemap:
            raise "unknown base %s" % short(chain[:4])

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 0
        while pos < len(data):
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos+84])
            link = linkmapper(cs)
            if node in self.nodemap:
                raise "already have %s" % hex(node[:4])
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(chain)
                text = self.patches(text, [delta])
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r, chain, prev = r, r + 1, node, node
            start = self.start(self.base(t))
            end = self.end(t)

        dfh.close()
        ifh.close()
        return node
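
group() and addgroup() agree on a simple entry layout: each entry is a 4-byte big-endian length l followed by the 20-byte node, p1, p2 and changeset node, then l - 84 bytes of delta; group() additionally prefixes its return value with a 4-byte total length, and the first entry's delta is against parent(nodes[0]) while each later one is against the previous entry. That is why addgroup() reads data[24:44], the p1 of the first entry, as the base of the chain. A sketch of slicing one entry, mirroring the unpack in addgroup() (illustrative; `data` and `pos` as in that method):

    l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s", data[pos:pos + 84])
    delta = data[pos + 84:pos + l]   # delta body, l - 84 bytes
    pos += l                         # next entry starts right after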