
Commit 5705018

Merge branch 'fromtree'
2 parents 778234d + 1e2265a

File tree: 9 files changed (+590, -21 lines)


lib/git/index/base.py

Lines changed: 30 additions & 5 deletions
@@ -59,14 +59,16 @@
     )
 
 from fun import (
+            entry_key,
             write_cache,
             read_cache,
-            write_tree_from_cache,
-            entry_key
+            aggressive_tree_merge,
+            write_tree_from_cache
         )
 
 from gitdb.base import IStream
 from gitdb.db import MemoryDB
+from itertools import izip
 
 __all__ = ( 'IndexFile', 'CheckoutError' )
 

@@ -133,7 +135,6 @@ def _set_cache_(self, attr):
     def _index_path(self):
         return join_path_native(self.repo.git_dir, "index")
 
-
     @property
     def path(self):
         """ :return: Path to the index file we are representing """

@@ -240,6 +241,31 @@ def merge_tree(self, rhs, base=None):
         self.repo.git.read_tree(args)
         return self
 
+    @classmethod
+    def new(cls, repo, *tree_sha):
+        """ Merge the given treeish revisions into a new index which is returned.
+        This method behaves like git-read-tree --aggressive when doing the merge.
+
+        :param repo: The repository treeish are located in.
+
+        :param *tree_sha:
+            see ``from_tree``
+
+        :return:
+            New IndexFile instance. Its path will be undefined.
+            If you intend to write such a merged Index, supply an alternate file_path
+            to its 'write' method."""
+        base_entries = aggressive_tree_merge(repo.odb, [str(t) for t in tree_sha])
+
+        inst = cls(repo)
+        # convert to entries dict
+        entries = dict(izip(((e.path, e.stage) for e in base_entries),
+                            (IndexEntry.from_base(e) for e in base_entries)))
+
+        inst.entries = entries
+        return inst
+
+
     @classmethod
     def from_tree(cls, repo, *treeish, **kwargs):
         """

@@ -275,8 +301,7 @@ def from_tree(cls, repo, *treeish, **kwargs):
 
         As the underlying git-read-tree command takes into account the current index,
         it will be temporarily moved out of the way to assure there are no unsuspected
-        interferences.
-        """
+        interferences."""
         if len(treeish) == 0 or len(treeish) > 3:
             raise ValueError("Please specify between 1 and 3 treeish, got %i" % len(treeish))
 
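A minimal usage sketch of the new IndexFile.new classmethod, assuming an existing repository and up to three tree SHAs; the repository path and the *_sha variables below are placeholders, not part of this commit:

    from git import Repo
    from git.index.base import IndexFile

    repo = Repo("path/to/repo")                  # hypothetical repository location
    # base_sha, ours_sha, theirs_sha are assumed hex SHAs of existing tree objects
    merged = IndexFile.new(repo, base_sha, ours_sha, theirs_sha)

    # the merged index has no path of its own, so pass one explicitly when writing
    merged.write("/tmp/merged_index")

As the docstring notes, the result mirrors git-read-tree --aggressive: clean entries end up on stage 0, conflicting ones stay on stages 1-3.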

lib/git/index/fun.py

Lines changed: 106 additions & 13 deletions
@@ -5,15 +5,19 @@
 from stat import S_IFDIR
 from cStringIO import StringIO
 
+from git.utils import IndexFileSHA1Writer
 from git.errors import UnmergedEntriesError
-from git.objects.fun import tree_to_stream
-from git.utils import (
-            IndexFileSHA1Writer,
-        )
+from git.objects.fun import (
+            tree_to_stream,
+            traverse_tree_recursive,
+            traverse_trees_recursive
+        )
 
 from typ import (
+            BaseIndexEntry,
             IndexEntry,
-            CE_NAMEMASK
+            CE_NAMEMASK,
+            CE_STAGESHIFT
         )
 
 from util import (

@@ -23,7 +27,6 @@
 
 from gitdb.base import IStream
 from gitdb.typ import str_tree_type
-from binascii import a2b_hex
 
 __all__ = ('write_cache', 'read_cache', 'write_tree_from_cache', 'entry_key' )
 

@@ -75,15 +78,16 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1
 def read_entry(stream):
     """Return: One entry of the given stream"""
     beginoffset = stream.tell()
-    ctime = unpack(">8s", stream.read(8))[0]
-    mtime = unpack(">8s", stream.read(8))[0]
+    read = stream.read
+    ctime = unpack(">8s", read(8))[0]
+    mtime = unpack(">8s", read(8))[0]
     (dev, ino, mode, uid, gid, size, sha, flags) = \
-        unpack(">LLLLLL20sH", stream.read(20 + 4 * 6 + 2))
+        unpack(">LLLLLL20sH", read(20 + 4 * 6 + 2))
     path_size = flags & CE_NAMEMASK
-    path = stream.read(path_size)
+    path = read(path_size)
 
     real_size = ((stream.tell() - beginoffset + 8) & ~7)
-    data = stream.read((beginoffset + real_size) - stream.tell())
+    data = read((beginoffset + real_size) - stream.tell())
     return IndexEntry((mode, sha, flags, path, ctime, mtime, dev, ino, uid, gid, size))
 
 def read_header(stream):

@@ -150,6 +154,7 @@ def write_tree_from_cache(entries, odb, sl, si=0):
     :return: tuple(binsha, list(tree_entry, ...)) a tuple of a sha and a list of
         tree entries being a tuple of hexsha, mode, name"""
     tree_items = list()
+    tree_items_append = tree_items.append
     ci = sl.start
     end = sl.stop
     while ci < end:

@@ -161,7 +166,7 @@ def write_tree_from_cache(entries, odb, sl, si=0):
         rbound = entry.path.find('/', si)
         if rbound == -1:
             # it's not a tree
-            tree_items.append((entry.binsha, entry.mode, entry.path[si:]))
+            tree_items_append((entry.binsha, entry.mode, entry.path[si:]))
         else:
             # find common base range
             base = entry.path[si:rbound]

@@ -178,7 +183,7 @@ def write_tree_from_cache(entries, odb, sl, si=0):
             # enter recursion
             # ci - 1 as we want to count our current item as well
             sha, tree_entry_list = write_tree_from_cache(entries, odb, slice(ci-1, xi), rbound+1)
-            tree_items.append((sha, S_IFDIR, base))
+            tree_items_append((sha, S_IFDIR, base))
 
             # skip ahead
             ci = xi

@@ -193,5 +198,93 @@ def write_tree_from_cache(entries, odb, sl, si=0):
     istream = odb.store(IStream(str_tree_type, len(sio.getvalue()), sio))
     return (istream.binsha, tree_items)
 
+def _tree_entry_to_baseindexentry(tree_entry, stage):
+    return BaseIndexEntry((tree_entry[1], tree_entry[0], stage << CE_STAGESHIFT, tree_entry[2]))
 
+def aggressive_tree_merge(odb, tree_shas):
+    """
+    :return: list of BaseIndexEntries representing the aggressive merge of the given
+        trees. All valid entries are on stage 0, whereas the conflicting ones are left
+        on stage 1, 2 or 3, where stage 1 corresponds to the common ancestor tree,
+        2 to our tree and 3 to 'their' tree.
+    :param tree_shas: 1, 2 or 3 trees as identified by their shas.
+        If 1 or 2 are given, the entries will effectively correspond to the last given tree.
+        If 3 are given, a 3-way merge is performed."""
+    out = list()
+    out_append = out.append
 
+    # one and two way merges are the same for us, as we don't have to handle
+    # an existing index
+    if len(tree_shas) in (1, 2):
+        for entry in traverse_tree_recursive(odb, tree_shas[-1], ''):
+            out_append(_tree_entry_to_baseindexentry(entry, 0))
+        # END for each entry
+        return out
+    # END handle single tree
+
+    if len(tree_shas) > 3:
+        raise ValueError("Cannot handle %i trees at once" % len(tree_shas))
+
+    # three trees
+    for base, ours, theirs in traverse_trees_recursive(odb, tree_shas, ''):
+        if base is not None:
+            # base version exists
+            if ours is not None:
+                # ours exists
+                if theirs is not None:
+                    # it exists in all branches; if it was changed in both
+                    # it's a conflict, otherwise we take the changed version.
+                    # This should be the most common branch, so it comes first
+                    if( base[0] != ours[0] and base[0] != theirs[0] and ours[0] != theirs[0] ) or \
+                        ( base[1] != ours[1] and base[1] != theirs[1] and ours[1] != theirs[1] ):
+                        # changed by both
+                        out_append(_tree_entry_to_baseindexentry(base, 1))
+                        out_append(_tree_entry_to_baseindexentry(ours, 2))
+                        out_append(_tree_entry_to_baseindexentry(theirs, 3))
+                    elif base[0] != ours[0] or base[1] != ours[1]:
+                        # only we changed it
+                        out_append(_tree_entry_to_baseindexentry(ours, 0))
+                    else:
+                        # either nobody changed it, or they did. In either
+                        # case, use theirs
+                        out_append(_tree_entry_to_baseindexentry(theirs, 0))
+                    # END handle modification
+                else:
+                    if ours[0] != base[0] or ours[1] != base[1]:
+                        # they deleted it, we changed it, conflict
+                        out_append(_tree_entry_to_baseindexentry(base, 1))
+                        out_append(_tree_entry_to_baseindexentry(ours, 2))
+                    # else:
+                    #    we didn't change it, ignore
+                    #    pass
+                    # END handle our change
+                # END handle theirs
+            else:
+                if theirs is None:
+                    # deleted in both, it's fine - it's out
+                    pass
+                else:
+                    if theirs[0] != base[0] or theirs[1] != base[1]:
+                        # deleted in ours, changed in theirs, conflict
+                        out_append(_tree_entry_to_baseindexentry(base, 1))
+                        out_append(_tree_entry_to_baseindexentry(theirs, 3))
+                    # END theirs changed
+                    # else:
+                    #    theirs didn't change
+                    #    pass
+                # END handle theirs
+            # END handle ours
+        else:
+            # all three can't be None
+            if ours is None:
+                # added in their branch
+                out_append(_tree_entry_to_baseindexentry(theirs, 0))
+            elif theirs is None:
+                # added in our branch
+                out_append(_tree_entry_to_baseindexentry(ours, 0))
+            # END handle heads
+        # END handle base exists
+    # END for each entries tuple
+
+    return out
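For illustration, a hedged sketch of how the output of aggressive_tree_merge might be inspected, grouping entries by stage; odb and the three tree SHAs are assumed to exist (e.g. repo.odb and the SHAs of the base, our and their trees), they are not defined by this commit:

    from git.index.fun import aggressive_tree_merge

    # odb is assumed to be the repository object database (e.g. repo.odb);
    # the list holds the SHAs of the base, our and their trees
    entries = aggressive_tree_merge(odb, [base_tree_sha, ours_tree_sha, theirs_tree_sha])

    clean = [e for e in entries if e.stage == 0]
    conflicts = [e for e in entries if e.stage != 0]
    for entry in conflicts:
        # stage 1 = common ancestor, 2 = ours, 3 = theirs
        print entry.stage, entry.path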

lib/git/index/typ.py

Lines changed: 3 additions & 0 deletions
@@ -56,6 +56,9 @@ class BaseIndexEntry(tuple):
 
     def __str__(self):
         return "%o %s %i\t%s" % (self.mode, self.hexsha, self.stage, self.path)
+
+    def __repr__(self):
+        return "(%o, %s, %i, %s)" % (self.mode, self.hexsha, self.stage, self.path)
 
     @property
     def mode(self):

lib/git/objects/fun.py

Lines changed: 118 additions & 0 deletions
@@ -2,6 +2,9 @@
 
 __all__ = ('tree_to_stream', 'tree_entries_from_data')
 
+from stat import S_ISDIR
+
+
 def tree_to_stream(entries, write):
     """Write the given list of entries into a stream using its write method
     :param entries: **sorted** list of tuples with (binsha, mode, name)

@@ -64,3 +67,118 @@ def tree_entries_from_data(data):
         out.append((sha, mode, name))
     # END for each byte in data stream
     return out
+
+
+def _find_by_name(tree_data, name, is_dir, start_at):
+    """return data entry matching the given name and tree mode
+    or None.
+    Before the item is returned, the respective data item is set
+    None in the tree_data list to mark it done"""
+    try:
+        item = tree_data[start_at]
+        if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
+            tree_data[start_at] = None
+            return item
+    except IndexError:
+        pass
+    # END exception handling
+    for index, item in enumerate(tree_data):
+        if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
+            tree_data[index] = None
+            return item
+        # END if item matches
+    # END for each item
+    return None
+
+def _to_full_path(item, path_prefix):
+    """Rebuild entry with given path prefix"""
+    if not item:
+        return item
+    return (item[0], item[1], path_prefix+item[2])
+
+def traverse_trees_recursive(odb, tree_shas, path_prefix):
+    """
+    :return: list with entries according to the given tree-shas.
+        The result is encoded in a list
+        of n tuple|None per blob/commit, (n == len(tree_shas)), where
+        * [0] == 20 byte sha
+        * [1] == mode as int
+        * [2] == path relative to working tree root
+        The entry tuple is None if the respective blob/commit did not
+        exist in the given tree.
+    :param tree_shas: iterable of shas pointing to trees. All trees must
+        be on the same level. A tree-sha may be None, in which case None
+        entries are returned in its place.
+    :param path_prefix: a prefix to be added to the returned paths on this level,
+        set it '' for the first iteration
+    :note: The ordering of the returned items will be partially lost"""
+    trees_data = list()
+    nt = len(tree_shas)
+    for tree_sha in tree_shas:
+        if tree_sha is None:
+            data = list()
+        else:
+            data = tree_entries_from_data(odb.stream(tree_sha).read())
+        # END handle muted trees
+        trees_data.append(data)
+    # END for each sha to get data for
+
+    out = list()
+    out_append = out.append
+
+    # find all matching entries and recursively process them together if the match
+    # is a tree. If the match is a non-tree item, put it into the result.
+    # Processed items will be set None
+    for ti, tree_data in enumerate(trees_data):
+        for ii, item in enumerate(tree_data):
+            if not item:
+                continue
+            # END skip already done items
+            entries = [ None for n in range(nt) ]
+            entries[ti] = item
+            sha, mode, name = item          # it's faster to unpack
+            is_dir = S_ISDIR(mode)          # type mode bits
+
+            # find this item in all other tree data items
+            # wrap around, but stop one before our current index, hence
+            # ti+nt, not ti+1+nt
+            for tio in range(ti+1, ti+nt):
+                tio = tio % nt
+                entries[tio] = _find_by_name(trees_data[tio], name, is_dir, ii)
+            # END for each other item data
+
+            # if we are a directory, enter recursion
+            if is_dir:
+                out.extend(traverse_trees_recursive(odb, [((ei and ei[0]) or None) for ei in entries], path_prefix+name+'/'))
+            else:
+                out_append(tuple(_to_full_path(e, path_prefix) for e in entries))
+            # END handle recursion
+
+            # finally mark it done
+            tree_data[ii] = None
+        # END for each item
+
+        # we are done with one tree, set all its data empty
+        del(tree_data[:])
+    # END for each tree_data chunk
+    return out
+
+def traverse_tree_recursive(odb, tree_sha, path_prefix):
+    """
+    :return: list of entries of the tree pointed to by tree_sha. An entry
+        has the following format:
+        * [0] 20 byte sha
+        * [1] mode as int
+        * [2] path relative to the repository
+    :param path_prefix: prefix to prepend to the front of all returned paths"""
+    entries = list()
+    data = tree_entries_from_data(odb.stream(tree_sha).read())
+
+    # unpacking/packing is faster than accessing individual items
+    for sha, mode, name in data:
+        if S_ISDIR(mode):
+            entries.extend(traverse_tree_recursive(odb, sha, path_prefix+name+'/'))
+        else:
+            entries.append((sha, mode, path_prefix+name))
+    # END for each item
+
+    return entries
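As a rough usage sketch of the two traversal helpers: odb and the tree SHAs below are assumed to be available from elsewhere (e.g. repo.odb and the SHAs of tree objects); none of these names are defined by this commit.

    from git.objects.fun import traverse_tree_recursive, traverse_trees_recursive

    # flat listing of a single tree: one (sha, mode, path) triple per blob
    for sha, mode, path in traverse_tree_recursive(odb, tree_sha, ''):
        print "%o %s" % (mode, path)

    # aligned listing of three trees; each row holds one entry tuple or None per
    # input tree, which is exactly the shape aggressive_tree_merge consumes
    for base, ours, theirs in traverse_trees_recursive(odb, [base_sha, ours_sha, theirs_sha], ''):
        print [entry and entry[2] for entry in (base, ours, theirs)]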

0 commit comments
