comparison mercurial/revlog.py @ 46:93e868fa0db8
Add changegroup support
author   | mpm@selenic.com
date     | Tue, 10 May 2005 00:40:49 -0800
parents  | f2b2d5daec30
children | b3e2ddff0159
comparing 45:f2b2d5daec30 with 46:93e868fa0db8
unchanged context (lines 225-229):

            if accumulate: accumulate(t)
            self.addrevision(t, transaction, linkseq.next(), e[1], e[2])

        # return the unmerged heads for later resolving
        return (old, self.tip())

added in 46:93e868fa0db8 (lines 230-412):

    def group(self, linkmap):
        # given a list of changeset revs, return a set of deltas and
        # metadata corresponding to nodes. The first delta is
        # parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
        # have this parent, as it has all history before these
        # changesets. parent is parent[0]

        revs = []
        needed = {}

        # find file nodes/revs that match changeset revs
        for i in xrange(0, self.count()):
            if self.index[i][3] in linkmap:
                revs.append(i)
                needed[i] = 1
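                # (index[i][3] is the linkrev: the revision of the
                # changeset that introduced this file revision, so this
                # test selects exactly the file revisions touched by
                # the requested changesets)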

        # if we don't have any revisions touched by these changesets, bail
        if not revs: return struct.pack(">l", 0)

        # add the parent of the first rev
        p = self.parents(self.node(revs[0]))[0]
        revs.insert(0, self.rev(p))
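        # (if that parent is the null node, self.rev(p) is -1; the
        # span and delta loops below special-case n < 0 and a >= 0
        # to handle it)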

        # for each delta that isn't contiguous in the log, we need to
        # reconstruct the base, reconstruct the result, and then
        # calculate the delta. We also need to do this where we've
        # stored a full version and not a delta
        for i in xrange(0, len(revs) - 1):
            a, b = revs[i], revs[i + 1]
            if a + 1 != b or self.base(b) == b:
                for j in xrange(self.base(a), a + 1):
                    needed[j] = 1
                for j in xrange(self.base(b), b + 1):
                    needed[j] = 1

        # calculate spans to retrieve from datafile
        needed = needed.keys()
        needed.sort()
        spans = []
        for n in needed:
            if n < 0: continue
            o = self.start(n)
            l = self.length(n)
            spans.append((o, l, [(n, l)]))

        # merge spans
        merge = [spans.pop(0)]
        while spans:
            e = spans.pop(0)
            f = merge[-1]
            if e[0] == f[0] + f[1]:
                merge[-1] = (f[0], f[1] + e[1], f[2] + e[2])
            else:
                merge.append(e)
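        # (adjacent spans coalesce into one contiguous read; the third
        # element keeps the per-revision lengths so the combined read
        # can be split back into chunks below)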

        # read spans in, divide up chunks
        chunks = {}
        for span in merge:
            # we reopen the file for each span to make http happy for now
            f = self.opener(self.datafile)
            f.seek(span[0])
            data = f.read(span[1])

            # divide up the span
            pos = 0
            for r, l in span[2]:
                chunks[r] = data[pos: pos + l]
                pos += l

        # helper to reconstruct intermediate versions
        def construct(text, base, rev):
            for r in range(base + 1, rev + 1):
                b = decompress(chunks[r])
                text = self.patch(text, b)
            return text

        # build deltas
        deltas = []
        for d in range(0, len(revs) - 1):
            a, b = revs[d], revs[d + 1]
            n = self.node(b)

            if a + 1 != b or self.base(b) == b:
                if a >= 0:
                    base = self.base(a)
                    ta = decompress(chunks[self.base(a)])
                    ta = construct(ta, base, a)
                else:
                    ta = ""

                base = self.base(b)
                if a > base:
                    base = a
                    tb = ta
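                    # (a sits inside b's delta chain, so the text just
                    # rebuilt for a can serve as the starting point)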
                else:
                    tb = decompress(chunks[self.base(b)])
                tb = construct(tb, base, b)
                d = self.diff(ta, tb)
            else:
                d = decompress(chunks[b])

            p = self.parents(n)
            meta = n + p[0] + p[1] + linkmap[self.linkrev(n)]
            l = struct.pack(">l", len(meta) + len(d) + 4)
            deltas.append(l + meta + d)
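            # (each chunk is a 4-byte big-endian length covering the
            # whole chunk, then the node, both parents, the linked
            # changeset node, and finally the delta itself)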

        l = struct.pack(">l", sum(map(len, deltas)) + 4)
        deltas.insert(0, l)
        return "".join(deltas)
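
The framing group() emits can be walked without a revlog at hand. A
minimal sketch of a reader for it, matching the struct layout above (the
name walk_group and the standalone form are illustrative, not part of
this changeset):

    import struct

    def walk_group(data):
        # bytes 0-4: total group length, including these 4 bytes
        total = struct.unpack(">l", data[0:4])[0]
        pos = 4
        while pos < total:
            # each chunk: a 4-byte length (covering the 84-byte header
            # and the delta), node, p1, p2, linked changeset node
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos + 84])
            yield node, p1, p2, cs, data[pos + 84:pos + l]
            pos += l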

    def addgroup(self, data, linkmapper, transaction):
        # given a set of deltas, add them to the revision log. the
        # first delta is against its parent, which should be in our
        # log; the rest are against the previous delta.

        if len(data) <= 4: return

        # retrieve the parent revision of the delta chain
        chain = data[28:48]
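        # (offset 28 skips the 4-byte group length, the first chunk's
        # 4-byte length field and its 20-byte node; bytes 28-48 are p1,
        # the parent the first delta applies against)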
        text = self.revision(chain)

        # track the base of the current delta log
        r = self.count()
        t = r - 1

        base = prev = -1
        start = end = 0
        if r:
            start = self.start(self.base(t))
            end = self.end(t)
            measure = self.length(self.base(t))
            base = self.base(t)
            prev = self.tip()

        transaction.add(self.datafile, end)
        transaction.add(self.indexfile, r * struct.calcsize(indexformat))
        dfh = self.opener(self.datafile, "a")
        ifh = self.opener(self.indexfile, "a")

        # loop through our set of deltas
        pos = 4
        while pos < len(data):
            l, node, p1, p2, cs = struct.unpack(">l20s20s20s20s",
                                                data[pos:pos+84])
            link = linkmapper(cs)
            delta = data[pos + 84:pos + l]
            pos += l

            # full versions are inserted when the needed deltas become
            # comparable to the uncompressed text or when the previous
            # version is not the one we have a delta against. We use
            # the size of the previous full rev as a proxy for the
            # current size.

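            # (end - start is the on-disk size of the current delta
            # chain; measure approximates the size of the last full
            # version)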
            if chain == prev:
                cdelta = compress(delta)

            if chain != prev or (end - start + len(cdelta)) > measure * 2:
                # flush our writes here so we can read it in revision
                dfh.flush()
                ifh.flush()
                text = self.revision(self.node(t))
                text = self.patch(text, delta)
                chk = self.addrevision(text, transaction, link, p1, p2)
                if chk != node:
                    raise "consistency error adding group"
                measure = len(text)
            else:
                e = (end, len(cdelta), self.base(t), link, p1, p2, node)
                self.index.append(e)
                self.nodemap[node] = r
                dfh.write(cdelta)
                ifh.write(struct.pack(indexformat, *e))

            t, r = r, r + 1
            chain = prev
            start = self.start(self.base(t))
            end = self.end(t)
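            # (setting chain = prev keeps the two equal from here on:
            # per the format, every later delta applies against the
            # revision just added, which is also the new tip)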

        dfh.close()
        ifh.close()
        return node
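
Taken together, the pair forms the transfer format for pulling changes:
the sender serializes with group(), the receiver replays with addgroup().
A rough sketch of the hand-off, where cl, changesets, filelog and
transaction are hypothetical stand-ins for the surrounding repository
code:

    # sender: map each outgoing changeset's linkrev to its node, then
    # serialize the file revisions those changesets touch
    linkmap = dict([(cl.rev(n), n) for n in changesets])
    data = filelog.group(linkmap)

    # receiver: translate each chunk's changeset node back into a local
    # linkrev while replaying the deltas inside one transaction
    def linkmapper(cs):
        return cl.rev(cs)

    tip = filelog.addgroup(data, linkmapper, transaction)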