Commit 45c1c99

test_index works
1 parent 7297ff6 commit 45c1c99

File tree (8 files changed, +52 / -39 lines changed):

git/compat.py
git/config.py
git/index/base.py
git/index/fun.py
git/index/typ.py
git/repo/base.py
git/test/test_index.py
git/util.py

git/compat.py
Lines changed: 4 additions & 0 deletions

@@ -31,6 +31,8 @@ def byte_ord(b):
         return b
     def bchr(n):
         return bytes([n])
+    def mviter(d):
+        return d.values()
 else:
     FileType = file
     # usually, this is just ascii, which might not enough for our encoding needs
@@ -39,6 +41,8 @@ def bchr(n):
         defenc = 'utf-8'
     byte_ord = ord
     bchr = chr
+    def mviter(d):
+        return d.itervalues()


 def with_metaclass(meta, *bases):

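For context, the new mviter helper gives both interpreters one spelling for "iterate a dict's values without building a list". A standalone sketch of how such a shim is typically written; the sys.version_info check below is an assumption of this sketch, standing in for however the real compat module detects Python 3:

    import sys

    PY3 = sys.version_info[0] >= 3

    if PY3:
        def mviter(d):
            # Python 3: dict.values() is already a lazy view
            return d.values()
    else:
        def mviter(d):
            # Python 2: itervalues() avoids copying the values into a list
            return d.itervalues()

    # usage: iterate entries without materialising the whole value list
    for value in mviter({'a': 1, 'b': 2}):
        print(value)
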
git/config.py
Lines changed: 8 additions & 1 deletion

@@ -99,6 +99,13 @@ def __init__(self, config, section):
         self._config = config
         self._section_name = section

+    def __del__(self):
+        # Yes, for some reason, we have to call it explicitly for it to work in PY3 !
+        # Apparently __del__ doesn't get call anymore if refcount becomes 0
+        # Ridiculous ... .
+        self._config.__del__()
+        # del self._config
+
     def __getattr__(self, attr):
         if attr in self._valid_attrs_:
             return lambda *args, **kwargs: self._call_config(attr, *args, **kwargs)
@@ -193,7 +200,7 @@ def __del__(self):
         """Write pending changes if required and release locks"""
         # checking for the lock here makes sure we do not raise during write()
         # in case an invalid parser was created who could not get a lock
-        if self.read_only or not self._lock._has_lock():
+        if self.read_only or (self._lock and not self._lock._has_lock()):
             return

         try:

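Both hunks are about object lifetime under Python 3: the section proxy now forwards deletion to the wrapped parser, and the parser's __del__ tolerates a missing lock. As a purely illustrative sketch (the LockedWriter class below is hypothetical, not part of GitPython), this is the general pattern of releasing a lock explicitly rather than trusting __del__ to run at a convenient moment:

    import os
    import tempfile


    class LockedWriter(object):
        """Toy stand-in for a config writer that holds a lock file while alive."""

        def __init__(self, path):
            self.lock_path = path + '.lock'
            open(self.lock_path, 'w').close()   # "acquire": create the lock file

        def release(self):
            if os.path.exists(self.lock_path):
                os.remove(self.lock_path)       # "release": remove the lock file

        def __del__(self):
            self.release()                      # best effort only; timing is up to the interpreter


    path = os.path.join(tempfile.mkdtemp(), 'config')
    writer = LockedWriter(path)
    # ... set values here ...
    writer.release()                            # deterministic: the lock is gone right now
    del writer                                  # then drop the reference once done
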
git/index/base.py
Lines changed: 12 additions & 11 deletions

@@ -43,7 +43,9 @@
     izip,
     xrange,
     string_types,
-    force_bytes
+    force_bytes,
+    defenc,
+    mviter
 )

 from git.util import (
@@ -105,7 +107,7 @@ def __init__(self, repo, file_path=None):
         repository's index on demand."""
         self.repo = repo
         self.version = self._VERSION
-        self._extension_data = ''
+        self._extension_data = b''
         self._file_path = file_path or self._index_path()

     def _set_cache_(self, attr):
@@ -165,9 +167,7 @@ def _deserialize(self, stream):

     def _entries_sorted(self):
         """:return: list of entries, in a sorted fashion, first by path, then by stage"""
-        entries_sorted = self.entries.values()
-        entries_sorted.sort(key=lambda e: (e.path, e.stage))  # use path/stage as sort key
-        return entries_sorted
+        return sorted(self.entries.values(), key=lambda e: (e.path, e.stage))

     def _serialize(self, stream, ignore_tree_extension_data=False):
         entries = self._entries_sorted()
@@ -399,7 +399,7 @@ def _write_path_to_stdin(self, proc, filepath, item, fmakeexc, fprogress,
         fprogress(filepath, False, item)
         rval = None
         try:
-            proc.stdin.write("%s\n" % filepath)
+            proc.stdin.write(("%s\n" % filepath).encode(defenc))
         except IOError:
             # pipe broke, usually because some error happend
             raise fmakeexc()
@@ -418,7 +418,7 @@ def iter_blobs(self, predicate=lambda t: True):
             Function(t) returning True if tuple(stage, Blob) should be yielded by the
             iterator. A default filter, the BlobFilter, allows you to yield blobs
             only if they match a given list of paths. """
-        for entry in self.entries.itervalues():
+        for entry in mviter(self.entries):
             blob = entry.to_blob(self.repo)
             blob.size = entry.size
             output = (entry.stage, blob)
@@ -443,7 +443,7 @@ def unmerged_blobs(self):
         for stage, blob in self.iter_blobs(is_unmerged_blob):
             path_map.setdefault(blob.path, list()).append((stage, blob))
         # END for each unmerged blob
-        for l in path_map.itervalues():
+        for l in mviter(path_map):
             l.sort()
         return path_map

@@ -860,7 +860,7 @@ def move(self, items, skip_errors=False, **kwargs):

         # parse result - first 0:n/2 lines are 'checking ', the remaining ones
         # are the 'renaming' ones which we parse
-        for ln in xrange(len(mvlines) / 2, len(mvlines)):
+        for ln in xrange(int(len(mvlines) / 2), len(mvlines)):
             tokens = mvlines[ln].split(' to ')
             assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln]

@@ -958,6 +958,7 @@ def handle_stderr(proc, iter_checked_out_files):
            if not stderr:
                return
            # line contents:
+           stderr = stderr.decode(defenc)
            # git-checkout-index: this already exists
            failed_files = list()
            failed_reasons = list()
@@ -1006,7 +1007,7 @@ def handle_stderr(proc, iter_checked_out_files):
            proc = self.repo.git.checkout_index(*args, **kwargs)
            proc.wait()
            fprogress(None, True, None)
-           rval_iter = (e.path for e in self.entries.itervalues())
+           rval_iter = (e.path for e in mviter(self.entries))
            handle_stderr(proc, rval_iter)
            return rval_iter
        else:
@@ -1036,7 +1037,7 @@ def handle_stderr(proc, iter_checked_out_files):
                    dir = co_path
                    if not dir.endswith('/'):
                        dir += '/'
-                   for entry in self.entries.itervalues():
+                   for entry in mviter(self.entries):
                        if entry.path.startswith(dir):
                            p = entry.path
                            self._write_path_to_stdin(proc, p, p, make_exc,

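The proc.stdin.write((...).encode(defenc)) and stderr.decode(defenc) changes follow one rule: pipes to and from a child process carry bytes under Python 3, so text is encoded on the way out and decoded on the way back. A minimal sketch with the standard library; the echoing child process is just a portable stand-in for the git commands the index code actually drives:

    import subprocess
    import sys

    encoding = 'utf-8'  # stands in for git.compat.defenc

    # a child process that echoes stdin back to stdout
    proc = subprocess.Popen(
        [sys.executable, '-c', 'import sys; sys.stdout.write(sys.stdin.read())'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    # encode text before writing to the child's binary stdin ...
    proc.stdin.write(('%s\n' % 'path/to/file').encode(encoding))
    proc.stdin.close()

    # ... and decode whatever comes back before treating it as text
    out = proc.stdout.read().decode(encoding)
    proc.wait()
    assert out == 'path/to/file\n'
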
git/index/fun.py
Lines changed: 1 addition & 1 deletion

@@ -73,7 +73,7 @@ def write_cache(entries, stream, extension_data=None, ShaStreamCls=IndexFileSHA1

     # header
     version = 2
-    write("DIRC")
+    write(b"DIRC")
     write(pack(">LL", version, len(entries)))

     # body

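write(b"DIRC") keeps the whole index header in bytes: struct.pack already returns bytes, and mixing a str literal into a binary stream fails under Python 3. A minimal reproduction of the same header layout against an in-memory stream:

    import io
    from struct import pack

    stream = io.BytesIO()
    version, num_entries = 2, 0

    stream.write(b"DIRC")                            # 4-byte signature, as bytes
    stream.write(pack(">LL", version, num_entries))  # two big-endian 32-bit fields

    assert stream.getvalue() == b"DIRC\x00\x00\x00\x02\x00\x00\x00\x00"
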
git/index/typ.py
Lines changed: 1 addition & 1 deletion

@@ -75,7 +75,7 @@ def binsha(self):

     @property
     def hexsha(self):
         """hex version of our sha"""
-        return b2a_hex(self[1])
+        return b2a_hex(self[1]).decode('ascii')

     @property
     def stage(self):

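binascii.b2a_hex returns bytes under Python 3, so the property decodes the result to keep hexsha a plain str. For instance:

    from binascii import b2a_hex

    binsha = b"\x00" * 20                     # a 20-byte binary SHA-1
    hexsha = b2a_hex(binsha).decode('ascii')  # bytes -> '000...0' str

    assert isinstance(hexsha, str) and len(hexsha) == 40
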
git/repo/base.py
Lines changed: 4 additions & 1 deletion

@@ -732,7 +732,10 @@ def _clone(cls, git, url, path, odb_default_type, progress, **kwargs):
         # sure
         repo = cls(os.path.abspath(path), odbt=odbt)
         if repo.remotes:
-            repo.remotes[0].config_writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/"))
+            writer = repo.remotes[0].config_writer
+            writer.set_value('url', repo.remotes[0].url.replace("\\\\", "\\").replace("\\", "/"))
+            # PY3: be sure cleanup is performed and lock is released
+            del writer
         # END handle remote repo
         return repo

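Splitting the chained call into writer = ...; writer.set_value(...); del writer makes the lock release explicit, the same motivation as in git/config.py above. The URL rewrite itself is unchanged: doubled backslashes collapse to one, and the remainder becomes forward slashes. A quick, self-contained check of that replace chain (the sample value is made up for illustration):

    url = "C:\\\\projects\\\\repo"   # i.e. the text C:\\projects\\repo, as a backslash-escaped remote URL might look
    normalized = url.replace("\\\\", "\\").replace("\\", "/")
    assert normalized == "C:/projects/repo"
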
git/test/test_index.py
Lines changed: 19 additions & 18 deletions

@@ -48,7 +48,7 @@ def __init__(self, *args):

     def _assert_fprogress(self, entries):
         assert len(entries) == len(self._fprogress_map)
-        for path, call_count in self._fprogress_map.iteritems():
+        for path, call_count in self._fprogress_map.items():
             assert call_count == 2
         # END for each item in progress map
         self._reset_progress()
@@ -86,7 +86,7 @@ def test_index_file_base(self):
         assert index.version > 0

         # test entry
-        entry = index.entries.itervalues().next()
+        entry = next(iter(index.entries.values()))
         for attr in ("path", "ctime", "mtime", "dev", "inode", "mode", "uid",
                      "gid", "size", "binsha", "hexsha", "stage"):
             getattr(entry, attr)
@@ -100,7 +100,7 @@ def test_index_file_base(self):
         # test stage
         index_merge = IndexFile(self.rorepo, fixture_path("index_merge"))
         assert len(index_merge.entries) == 106
-        assert len(list(e for e in index_merge.entries.itervalues() if e.stage != 0))
+        assert len(list(e for e in index_merge.entries.values() if e.stage != 0))

         # write the data - it must match the original
         tmpfile = tempfile.mktemp()
@@ -167,7 +167,7 @@ def test_index_file_from_tree(self, rw_repo):
         assert unmerged_blob_map

         # pick the first blob at the first stage we find and use it as resolved version
-        three_way_index.resolve_blobs(l[0][1] for l in unmerged_blob_map.itervalues())
+        three_way_index.resolve_blobs(l[0][1] for l in unmerged_blob_map.values())
         tree = three_way_index.write_tree()
         assert isinstance(tree, Tree)
         num_blobs = 0
@@ -201,7 +201,7 @@ def test_index_merge_tree(self, rw_repo):
         # Add a change with a NULL sha that should conflict with next_commit. We
         # pretend there was a change, but we do not even bother adding a proper
         # sha for it ( which makes things faster of course )
-        manifest_fake_entry = BaseIndexEntry((manifest_entry[0], "\0" * 20, 0, manifest_entry[3]))
+        manifest_fake_entry = BaseIndexEntry((manifest_entry[0], b"\0" * 20, 0, manifest_entry[3]))
         # try write flag
         self._assert_entries(rw_repo.index.add([manifest_fake_entry], write=False))
         # add actually resolves the null-hex-sha for us as a feature, but we can
@@ -236,7 +236,7 @@ def test_index_merge_tree(self, rw_repo):
         # now make a proper three way merge with unmerged entries
         unmerged_tree = IndexFile.from_tree(rw_repo, parent_commit, tree, next_commit)
         unmerged_blobs = unmerged_tree.unmerged_blobs()
-        assert len(unmerged_blobs) == 1 and unmerged_blobs.keys()[0] == manifest_key[0]
+        assert len(unmerged_blobs) == 1 and list(unmerged_blobs.keys())[0] == manifest_key[0]

     @with_rw_repo('0.1.6')
     def test_index_file_diffing(self, rw_repo):
@@ -295,7 +295,7 @@ def test_index_file_diffing(self, rw_repo):
         assert index.diff(None)

         # reset the working copy as well to current head,to pull 'back' as well
-        new_data = "will be reverted"
+        new_data = b"will be reverted"
         file_path = os.path.join(rw_repo.working_tree_dir, "CHANGES")
         fp = open(file_path, "wb")
         fp.write(new_data)
@@ -312,7 +312,7 @@ def test_index_file_diffing(self, rw_repo):

         # test full checkout
         test_file = os.path.join(rw_repo.working_tree_dir, "CHANGES")
-        open(test_file, 'ab').write("some data")
+        open(test_file, 'ab').write(b"some data")
         rval = index.checkout(None, force=True, fprogress=self._fprogress)
         assert 'CHANGES' in list(rval)
         self._assert_fprogress([None])
@@ -336,7 +336,7 @@ def test_index_file_diffing(self, rw_repo):
         self.failUnlessRaises(CheckoutError, index.checkout, paths=["doesnt/exist"])

         # checkout file with modifications
-        append_data = "hello"
+        append_data = b"hello"
         fp = open(test_file, "ab")
         fp.write(append_data)
         fp.close()
@@ -346,13 +346,13 @@ def test_index_file_diffing(self, rw_repo):
             assert len(e.failed_files) == 1 and e.failed_files[0] == os.path.basename(test_file)
             assert (len(e.failed_files) == len(e.failed_reasons)) and isinstance(e.failed_reasons[0], string_types)
             assert len(e.valid_files) == 0
-            assert open(test_file).read().endswith(append_data)
+            assert open(test_file, 'rb').read().endswith(append_data)
         else:
             raise AssertionError("Exception CheckoutError not thrown")

         # if we force it it should work
         index.checkout(test_file, force=True)
-        assert not open(test_file).read().endswith(append_data)
+        assert not open(test_file, 'rb').read().endswith(append_data)

         # checkout directory
         shutil.rmtree(os.path.join(rw_repo.working_tree_dir, "lib"))
@@ -379,14 +379,15 @@ def test_index_mutation(self, rw_repo):

         uname = "Some Developer"
         umail = "sd@company.com"
-        rw_repo.config_writer().set_value("user", "name", uname)
-        rw_repo.config_writer().set_value("user", "email", umail)
+        writer = rw_repo.config_writer()
+        writer.set_value("user", "name", uname)
+        writer.set_value("user", "email", umail)

         # remove all of the files, provide a wild mix of paths, BaseIndexEntries,
         # IndexEntries
         def mixed_iterator():
             count = 0
-            for entry in index.entries.itervalues():
+            for entry in index.entries.values():
                 type_id = count % 4
                 if type_id == 0:  # path
                     yield entry.path
@@ -500,7 +501,7 @@ def mixed_iterator():

         # mode 0 not allowed
         null_hex_sha = Diff.NULL_HEX_SHA
-        null_bin_sha = "\0" * 20
+        null_bin_sha = b"\0" * 20
         self.failUnlessRaises(ValueError, index.reset(
             new_commit).add, [BaseIndexEntry((0, null_bin_sha, 0, "doesntmatter"))])

@@ -526,7 +527,7 @@ def mixed_iterator():
             assert S_ISLNK(index.entries[index.entry_key("my_real_symlink", 0)].mode)

             # we expect only the target to be written
-            assert index.repo.odb.stream(entries[0].binsha).read() == target
+            assert index.repo.odb.stream(entries[0].binsha).read().decode('ascii') == target
         # END real symlink test

         # add fake symlink and assure it checks-our as symlink
@@ -618,7 +619,7 @@ def make_paths():

             for fid in range(3):
                 fname = 'newfile%i' % fid
-                open(fname, 'wb').write("abcd")
+                open(fname, 'wb').write(b"abcd")
                 yield Blob(rw_repo, Blob.NULL_BIN_SHA, 0o100644, fname)
             # END for each new file
         # END path producer
@@ -716,5 +717,5 @@ def test_index_bare_add(self, rw_bare_repo):
         try:
             rw_bare_repo.index.add([path])
         except Exception as e:
-            asserted = "does not have a working tree" in e.message
+            asserted = "does not have a working tree" in str(e)
         assert asserted, "Adding using a filename is not correctly asserted."

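Most of these test edits repeat two adjustments: data written through a file opened in binary mode must be bytes and must be read back in binary mode before comparing against a bytes suffix, and e.message, which no longer exists in Python 3, gives way to str(e). A compact, self-contained sketch of both, using a temporary file rather than the repository fixtures:

    import os
    import tempfile

    append_data = b"hello"                    # bytes, to match the 'ab' mode below

    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'ab') as fp:
        fp.write(append_data)

    with open(path, 'rb') as fp:              # read back as bytes before comparing
        assert fp.read().endswith(append_data)

    try:
        raise ValueError("does not have a working tree")
    except Exception as e:
        assert "does not have a working tree" in str(e)   # str(e) instead of e.message

    os.remove(path)
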
git/util.py
Lines changed: 3 additions & 6 deletions

@@ -446,7 +446,7 @@ class IndexFileSHA1Writer(object):

     def __init__(self, f):
         self.f = f
-        self.sha1 = make_sha("")
+        self.sha1 = make_sha(b"")

     def write(self, data):
         self.sha1.update(data)
@@ -490,10 +490,7 @@ def _lock_file_path(self):
     def _has_lock(self):
         """:return: True if we have a lock and if the lockfile still exists
         :raise AssertionError: if our lock-file does not exist"""
-        if not self._owns_lock:
-            return False
-
-        return True
+        return self._owns_lock

     def _obtain_lock_or_raise(self):
         """Create a lock file as flag for other instances, mark our instance as lock-holder
@@ -531,7 +528,7 @@ def _release_lock(self):
             # on bloody windows, the file needs write permissions to be removable.
             # Why ...
             if os.name == 'nt':
-                os.chmod(lfp, int("0777", 8))
+                os.chmod(lfp, 0o777)
             # END handle win32
             os.remove(lfp)
         except OSError:

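The util.py changes are mechanical Python 3 fixes: hash objects only accept bytes, the redundant branch in _has_lock collapses to returning the flag, and the old int("0777", 8) workaround becomes the 0o777 literal that both interpreters parse. A small check; plain hashlib.sha1 stands in for make_sha here, on the assumption that the helper is a thin wrapper around it:

    from hashlib import sha1

    digest = sha1(b"")        # seeding with bytes works on Python 2 and 3
    digest.update(b"DIRC")    # as does every later update
    print(digest.hexdigest())

    assert 0o777 == int("0777", 8) == 511   # the new octal literal spells the same mode bits
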