        ie.text_size = None
        ie.text_sha1 = None
    else:
        ie.text_size = sum(imap(len, blob.chunked))
        ie.text_sha1 = osutils.sha_strings(blob.chunked)
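    # Hashing the chunked form avoids joining a large blob into one big
    # string; sha_strings over the chunks yields the same sha1 as hashing
    # the concatenated data.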
    # Check what revision we should store
    parent_keys = []
    for pinv in parent_invs:
        try:
            pie = pinv[file_id]
        except errors.NoSuchId:
            continue
        if (pie.text_sha1 == ie.text_sha1 and
            pie.executable == ie.executable and
            pie.symlink_target == ie.symlink_target):
            # found a revision in one of the parents to use
            ie.revision = pie.revision
            break
        parent_key = (file_id, pie.revision)
        if parent_key not in parent_keys:
            parent_keys.append(parent_key)
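    # parent_keys ends up as the per-file parent list for the new text
    # record: one (file_id, revision) key per parent revision that carried
    # a different version of this file.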
    if ie.revision is None:
        # Need to store a new revision
        ie.revision = revision_id
        assert ie.revision is not None
        if ie.kind == 'symlink':
            chunks = []
        else:
            chunks = blob.chunked
        texts.insert_record_stream([
            ChunkedContentFactory((file_id, ie.revision),
                tuple(parent_keys), ie.text_sha1, chunks)])
    invdelta = []
    if base_hexsha is not None:
        old_path = path # Renames are not supported yet
        if stat.S_ISDIR(base_mode):
            invdelta.extend(remove_disappeared_children(base_inv, old_path,
                lookup_object(base_hexsha), [], lookup_object))
    else:
        old_path = None
    invdelta.append((old_path, path, file_id, ie))
    if base_hexsha != hexsha:
        store_updater.add_object(blob, ie)
    return invdelta
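
# An inventory delta is a list of (old_path, new_path, file_id, new_entry)
# tuples: old_path is None for entries that are newly added, and new_path
# and new_entry are None for entries that are being removed.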

class SubmodulesRequireSubtrees(BzrError):
    """Raised when a git tree contains submodules, but the target bzr
    repository format does not support nested trees (subtrees)."""


def import_git_submodule(texts, mapping, path, name, (base_hexsha, hexsha),
        base_inv, parent_id, revision_id, parent_invs, lookup_object,
        (base_mode, mode), store_updater, lookup_file_id):
    """Import a git submodule."""
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    file_id = lookup_file_id(path)
    ie = TreeReference(file_id, name.decode("utf-8"), parent_id)
    ie.revision = revision_id
    if base_hexsha is None:
        oldpath = None
    else:
        oldpath = path # Renames are not supported yet
    ie.reference_revision = mapping.revision_id_foreign_to_bzr(hexsha)
    texts.insert_record_stream([
        ChunkedContentFactory((file_id, ie.revision), (), None, [])])
    invdelta = [(oldpath, path, file_id, ie)]
    return invdelta, {}
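
# A TreeReference does not carry the submodule's contents; it only records
# the referenced commit, translated to a bzr revision id via
# mapping.revision_id_foreign_to_bzr and stored in ie.reference_revision.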


def remove_disappeared_children(base_inv, path, base_tree, existing_children,
        lookup_object):
    """Generate an inventory delta that removes children no longer present.

    :param base_inv: Base inventory against which to generate the delta
    :param path: Path of the directory in the base inventory
    :param base_tree: Git Tree object for the base revision of the directory
    :param existing_children: Names of children that still exist
    :param lookup_object: Function for looking up a git object by its SHA1
    :return: List of inventory delta entries
    """
    ret = []
    for name, mode, hexsha in base_tree.iteritems():
        if name in existing_children:
            continue
        c_path = posixpath.join(path, name.decode("utf-8"))
        ret.append((c_path, None, base_inv.path2id(c_path), None))
        if stat.S_ISDIR(mode):
            ret.extend(remove_disappeared_children(
                base_inv, c_path, lookup_object(hexsha), [], lookup_object))
    return ret
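
# Example: if a directory "a" containing "a/b" disappears from the new tree,
# the returned delta contains ("a", None, <file_id of a>, None) and
# ("a/b", None, <file_id of a/b>, None); the (path, None, file_id, None)
# shape marks a removal.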


def import_git_tree(texts, mapping, path, name, (base_hexsha, hexsha),
        base_inv, parent_id, revision_id, parent_invs,
        lookup_object, (base_mode, mode), store_updater,
        lookup_file_id, allow_submodules=False):
    """Import a git tree object into a bzr repository.

    :param texts: VersionedFiles object to add to
    :param base_inv: Base inventory against which to return inventory delta
    :return: Inventory delta for this subtree
    """
    if base_hexsha == hexsha and base_mode == mode:
        # If nothing has changed since the base revision, we're done
        return [], {}
    invdelta = []
    file_id = lookup_file_id(path)
    # We just have to hope this is indeed utf-8:
    ie = InventoryDirectory(file_id, name.decode("utf-8"), parent_id)
    tree = lookup_object(hexsha)
    if base_hexsha is None:
        base_tree = None
        old_path = None # Newly appeared here
    else:
        base_tree = lookup_object(base_hexsha)
        old_path = path # Renames aren't supported yet
    if base_tree is None or type(base_tree) is not Tree:
        ie.revision = revision_id
        invdelta.append((old_path, path, ie.file_id, ie))
        texts.insert_record_stream([
            ChunkedContentFactory((ie.file_id, ie.revision), (), None, [])])
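    # Directories carry no text, but bzr still expects a (file_id, revision)
    # text record for every inventory entry, hence the empty content factory
    # whenever this directory gets a new revision.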
    # Remember for next time
    existing_children = set()
    child_modes = {}
    for child_mode, name, child_hexsha in tree.entries():
        existing_children.add(name)
        child_path = posixpath.join(path, name)
        if type(base_tree) is Tree:
            try:
                child_base_mode, child_base_hexsha = base_tree[name]
            except KeyError:
                child_base_hexsha = None
                child_base_mode = 0
        else:
            child_base_hexsha = None
            child_base_mode = 0
        if stat.S_ISDIR(child_mode):
            subinvdelta, grandchildmodes = import_git_tree(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha), base_inv,
                file_id, revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id,
                allow_submodules=allow_submodules)
        elif S_ISGITLINK(child_mode): # submodule
            if not allow_submodules:
                raise SubmodulesRequireSubtrees()
            subinvdelta, grandchildmodes = import_git_submodule(texts, mapping,
                child_path, name, (child_base_hexsha, child_hexsha), base_inv,
                file_id, revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id)
        else:
            subinvdelta = import_git_blob(texts, mapping, child_path, name,
                (child_base_hexsha, child_hexsha), base_inv, file_id,
                revision_id, parent_invs, lookup_object,
                (child_base_mode, child_mode), store_updater, lookup_file_id)
            grandchildmodes = {}
        child_modes.update(grandchildmodes)
        invdelta.extend(subinvdelta)
        if child_mode not in (stat.S_IFDIR, DEFAULT_FILE_MODE,
                        stat.S_IFLNK, DEFAULT_FILE_MODE|0111):
            child_modes[child_path] = child_mode
    # Remove any children that have disappeared
    if base_tree is not None and type(base_tree) is Tree:
        invdelta.extend(remove_disappeared_children(base_inv, old_path,
            base_tree, existing_children, lookup_object))
    store_updater.add_object(tree, ie)
    return invdelta, child_modes
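
# Any path whose git file mode is not one of the four modes bzr models
# natively (directory, regular file, symlink, executable file) ends up in
# child_modes; import_git_commit warns about these and records them through
# the mapping so the original mode can be reconstructed on export.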


def verify_commit_reconstruction(target_git_object_retriever, lookup_object,
        o, rev, ret_tree, parent_trees, mapping, unusual_modes):
    new_unusual_modes = mapping.export_unusual_file_modes(rev)
    if new_unusual_modes != unusual_modes:
        raise AssertionError("unusual modes don't match: %r != %r" % (
            unusual_modes, new_unusual_modes))
    # Verify that we can reconstruct the commit properly
    rec_o = target_git_object_retriever._reconstruct_commit(rev, o.tree)
    if rec_o != o:
        raise AssertionError("Reconstructed commit differs: %r != %r" % (
            rec_o, o))
    diff = []
    new_objs = {}
    for path, obj, ie in _tree_to_objects(ret_tree, parent_trees,
        target_git_object_retriever._cache.idmap, unusual_modes,
        mapping.BZR_DUMMY_FILE):
        old_obj_id = tree_lookup_path(lookup_object, o.tree, path)[1]
        new_objs[path] = obj
        if obj.id != old_obj_id:
            diff.append((path, lookup_object(old_obj_id), obj))
    for (path, old_obj, new_obj) in diff:
        while (old_obj.type_name == "tree" and
               new_obj.type_name == "tree" and
               sorted(old_obj) == sorted(new_obj)):
            for name in old_obj:
                if old_obj[name][0] != new_obj[name][0]:
                    raise AssertionError("Modes for %s differ: %o != %o" %
                        (path, old_obj[name][0], new_obj[name][0]))
                if old_obj[name][1] != new_obj[name][1]:
                    # Found a differing child, delve deeper
                    path = posixpath.join(path, name)
                    old_obj = lookup_object(old_obj[name][1])
                    new_obj = new_objs[path]
                    break
        raise AssertionError("objects differ for %s: %r != %r" % (path,
            old_obj, new_obj))


def import_git_commit(repo, mapping, head, lookup_object,
        target_git_object_retriever, trees_cache):
    o = lookup_object(head)
    rev = mapping.import_commit(o,
        lambda x: target_git_object_retriever.lookup_git_sha(x)[1][0])
    # We have to do this here, since we have to walk the tree and
    # we need to make sure to import the blobs / trees with the right
    # path; this may involve adding them more than once.
    parent_trees = trees_cache.revision_trees(rev.parent_ids)
    if parent_trees == []:
        base_inv = Inventory(root_id=None)
        base_tree = None
        base_mode = None
    else:
        base_inv = parent_trees[0].inventory
        base_tree = lookup_object(o.parents[0]).tree
        base_mode = stat.S_IFDIR
    store_updater = target_git_object_retriever._get_updater(rev)
    store_updater.add_object(o, None)
    lookup_file_id = mapping.get_fileid_map(lookup_object, o.tree).lookup_file_id
    inv_delta, unusual_modes = import_git_tree(repo.texts,
            mapping, "", u"", (base_tree, o.tree), base_inv,
            None, rev.revision_id, [p.inventory for p in parent_trees],
            lookup_object, (base_mode, stat.S_IFDIR), store_updater,
            lookup_file_id,
            allow_submodules=getattr(repo._format, "supports_tree_reference", False))
    store_updater.finish()
    if unusual_modes != {}:
        for path, mode in unusual_modes.iteritems():
            warn_unusual_mode(rev.foreign_revid, path, mode)
        mapping.import_unusual_file_modes(rev, unusual_modes)
    if len(rev.parent_ids) == 0:
        basis_id = NULL_REVISION
    else:
        basis_id = rev.parent_ids[0]
    rev.inventory_sha1, inv = repo.add_inventory_by_delta(basis_id,
        inv_delta, rev.revision_id, rev.parent_ids, base_inv)
    ret_tree = RevisionTree(repo, inv, rev.revision_id)
    trees_cache.add(ret_tree)
    repo.add_revision(rev.revision_id, rev)
    if "verify" in debug.debug_flags:
        verify_commit_reconstruction(target_git_object_retriever,
            lookup_object, o, rev, ret_tree, parent_trees, mapping,
            unusual_modes)
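
# trees_cache is an LRUTreeCache: it keeps recently constructed RevisionTrees
# in memory so that importing a child commit can reuse its parent's inventory
# (via trees_cache.revision_trees) instead of re-reading it from the
# repository.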


def import_git_objects(repo, mapping, object_iter,
        target_git_object_retriever, heads, pb=None, limit=None):
    """Import a set of git objects into a bzr repository.

    :param repo: Target Bazaar repository
    :param mapping: Mapping to use
    :param object_iter: Iterator over Git objects.
    :return: Tuple with pack hints and last imported revision id
    """
    def lookup_object(sha):
        try:
            return object_iter[sha]
        except KeyError:
            return target_git_object_retriever[sha]
    graph = []
    checked = set()
    heads = list(set(heads))
    trees_cache = LRUTreeCache(repo)
    # Find and convert commit objects
    while heads:
        if pb is not None:
            pb.update("finding revisions to fetch", len(graph), None)
        head = heads.pop()
        try:
            o = lookup_object(head)
        except KeyError:
            continue
        if isinstance(o, Commit):
            rev = mapping.import_commit(o, lambda x: None)
            if repo.has_revision(rev.revision_id):
                continue
            graph.append((o.id, o.parents))
            heads.extend([p for p in o.parents if p not in checked])
        elif isinstance(o, Tag):
            if o.object[1] not in checked:
                heads.append(o.object[1])
        else:
            trace.warning("Unable to import head object %r" % o)
        checked.add(o.id)
    # Order the revisions
    # Create the inventory objects
    batch_size = 1000 # revisions per write group; default value assumed here
    revision_ids = topo_sort(graph)
    pack_hints = []
    if limit is not None:
        revision_ids = revision_ids[:limit]
    last_imported = None
    for offset in range(0, len(revision_ids), batch_size):
        target_git_object_retriever.start_write_group()
        try:
            repo.start_write_group()
            try:
                for i, head in enumerate(
                    revision_ids[offset:offset+batch_size]):
                    if pb is not None:
                        pb.update("fetching revisions", offset+i,
                            len(revision_ids))
                    import_git_commit(repo, mapping, head, lookup_object,
                        target_git_object_retriever, trees_cache)
                    last_imported = head
            except:
                repo.abort_write_group()
                raise
            else:
                hint = repo.commit_write_group()
                if hint is not None:
                    pack_hints.extend(hint)
        except:
            target_git_object_retriever.abort_write_group()
            raise
        else:
            target_git_object_retriever.commit_write_group()
    return pack_hints, last_imported
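
# Commits are imported in batches of batch_size revisions; each batch is
# wrapped in a write group on both the bzr repository and the git object
# store, so a failure aborts the whole batch instead of leaving partially
# imported revisions behind.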
class InterGitRepository(InterRepository):