2 changes: 1 addition & 1 deletion gitdb/__init__.py
@@ -21,7 +21,7 @@ def _init_externals():
         except ImportError as e:
             raise ImportError("'%s' could not be imported, assure it is located in your PYTHONPATH" % module) from e
         # END verify import
-    # END handel imports
+    # END handle imports

 #} END initialization

2 changes: 1 addition & 1 deletion gitdb/db/pack.py
@@ -131,7 +131,7 @@ def store(self, istream):

     def update_cache(self, force=False):
         """
-        Update our cache with the acutally existing packs on disk. Add new ones,
+        Update our cache with the actually existing packs on disk. Add new ones,
         and remove deleted ones. We keep the unchanged ones

         :param force: If True, the cache will be updated even though the directory
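
The docstring above describes a rescan-and-diff strategy: enumerate the packs actually on disk, drop cache entries whose files vanished, add entries for new files, and leave unchanged entries alone. A minimal sketch of that strategy, using a plain dict as the cache and os.stat results standing in for gitdb's pack entities (names and structure are assumptions, not gitdb's API):

import glob
import os

def update_pack_cache(cache, pack_dir):
    """Synchronize 'cache' (path -> stat result) with the .pack files
    actually present in pack_dir: add new ones, remove deleted ones,
    keep unchanged ones untouched."""
    on_disk = set(glob.glob(os.path.join(pack_dir, 'pack-*.pack')))
    for path in set(cache) - on_disk:
        del cache[path]              # pack file was deleted
    for path in on_disk - set(cache):
        cache[path] = os.stat(path)  # newly appeared pack file
    return cache
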
8 changes: 4 additions & 4 deletions gitdb/fun.py
@@ -103,7 +103,7 @@ def delta_chunk_apply(dc, bbuf, write):
         write(bbuf[dc.so:dc.so + dc.ts])
     else:
         # APPEND DATA
-        # whats faster: if + 4 function calls or just a write with a slice ?
+        # what's faster: if + 4 function calls or just a write with a slice ?
         # Considering data can be larger than 127 bytes now, it should be worth it
         if dc.ts < len(dc.data):
             write(dc.data[:dc.ts])
@@ -292,7 +292,7 @@ def check_integrity(self, target_size=-1):
         """Verify the list has non-overlapping chunks only, and the total size matches
         target_size
         :param target_size: if not -1, the total size of the chain must be target_size
-        :raise AssertionError: if the size doen't match"""
+        :raise AssertionError: if the size doesn't match"""
         if target_size > -1:
             assert self[-1].rbound() == target_size
             assert reduce(lambda x, y: x + y, (d.ts for d in self), 0) == target_size
@@ -331,7 +331,7 @@ def connect_with_next_base(self, bdcl):
         cannot be changed by any of the upcoming bases anymore. Once all our
         chunks are marked like that, we can stop all processing
         :param bdcl: data chunk list being one of our bases. They must be fed in
-            consequtively and in order, towards the earliest ancestor delta
+            consecutively and in order, towards the earliest ancestor delta
         :return: True if processing was done. Use it to abort processing of
             remaining streams if False is returned"""
         nfc = 0 # number of frozen chunks
@@ -624,7 +624,7 @@ def apply_delta_data(src_buf, src_buf_size, delta_buf, delta_buf_size, write):

     :param src_buf: random access data from which the delta was created
     :param src_buf_size: size of the source buffer in bytes
-    :param delta_buf_size: size fo the delta buffer in bytes
+    :param delta_buf_size: size for the delta buffer in bytes
     :param delta_buf: random access delta data
     :param write: write method taking a chunk of bytes
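
For context on the last hunk: apply_delta_data walks the opcode stream of a git binary delta. An opcode with the high bit set copies an (offset, size) span from the source buffer, with the low bits selecting which little-endian offset/size bytes follow; any other non-zero opcode inserts that many literal bytes taken from the delta itself. A simplified, self-contained sketch of that opcode loop (a reference for the format, not gitdb's optimized implementation; in gitdb the two leading size varints of a raw delta are parsed separately, so this sketch assumes they are already stripped):

def apply_delta(src_buf, delta_buf, write):
    """Apply git binary-delta opcodes in delta_buf against src_buf,
    emitting the target through write(). Assumes the two leading size
    varints of the raw delta have already been stripped."""
    i, dlen = 0, len(delta_buf)
    while i < dlen:
        c = delta_buf[i]
        i += 1
        if c & 0x80:
            # copy-from-source: low 4 bits select offset bytes,
            # next 3 bits select size bytes, little-endian
            cp_off = cp_size = 0
            for shift, bit in enumerate((0x01, 0x02, 0x04, 0x08)):
                if c & bit:
                    cp_off |= delta_buf[i] << (shift * 8)
                    i += 1
            for shift, bit in enumerate((0x10, 0x20, 0x40)):
                if c & bit:
                    cp_size |= delta_buf[i] << (shift * 8)
                    i += 1
            if cp_size == 0:
                cp_size = 0x10000  # size 0 encodes 64KiB
            write(src_buf[cp_off:cp_off + cp_size])
        elif c:
            # insert: the opcode itself is the literal length (1..127)
            write(delta_buf[i:i + c])
            i += c
        else:
            raise ValueError("delta opcode 0 is reserved")

For example, apply_delta(b'hello world', b'\x91\x00\x05', out.write) copies the first five source bytes and writes b'hello'.
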
4 changes: 2 additions & 2 deletions gitdb/pack.py
@@ -224,7 +224,7 @@ def write(self, pack_sha, write):
             if ofs > 0x7fffffff:
                 tmplist.append(ofs)
                 ofs = 0x80000000 + len(tmplist) - 1
-            # END hande 64 bit offsets
+            # END handle 64 bit offsets
             sha_write(pack('>L', ofs & 0xffffffff))
         # END for each offset
@@ -506,7 +506,7 @@ class PackFile(LazyMixin):
     """A pack is a file written according to the Version 2 for git packs

     As we currently use memory maps, it could be assumed that the maximum size of
-    packs therefor is 32 bit on 32 bit systems. On 64 bit systems, this should be
+    packs therefore is 32 bit on 32 bit systems. On 64 bit systems, this should be
     fine though.

     **Note:** at some point, this might be implemented using streams as well, or
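
The first hunk is the writer for the version 2 pack-index offset table: a pack offset above 0x7fffffff cannot live in a 32-bit entry, so it is appended to a trailing 64-bit overflow table, and the 32-bit slot stores the overflow index with the most significant bit set as a flag. A self-contained sketch of that encoding, modeled directly on the lines shown above (the function name is hypothetical):

from struct import pack

def encode_index_offsets(offsets):
    """Encode pack offsets as in a version 2 pack index: one big-endian
    32-bit word per object; offsets above 0x7fffffff go into a trailing
    64-bit table, referenced by index with the MSB set as a flag."""
    words = []
    tmplist = []  # 64-bit offsets, in order of first appearance
    for ofs in offsets:
        if ofs > 0x7fffffff:
            tmplist.append(ofs)
            ofs = 0x80000000 + len(tmplist) - 1
        # END handle 64 bit offsets
        words.append(pack('>L', ofs & 0xffffffff))
    # END for each offset
    return b''.join(words) + b''.join(pack('>Q', o) for o in tmplist)

For instance, encode_index_offsets([12, 2**32]) yields the 32-bit words 12 and 0x80000000, followed by the 64-bit value 2**32 in the overflow table.
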
2 changes: 1 addition & 1 deletion gitdb/stream.py
@@ -289,7 +289,7 @@ def read(self, size=-1):
         # They are thorough, and I assume it is truly working.
         # Why is this logic as convoluted as it is ? Please look at the table in
         # https://github.com/gitpython-developers/gitdb/issues/19 to learn about the test-results.
-        # Bascially, on py2.6, you want to use branch 1, whereas on all other python version, the second branch
+        # Basically, on py2.6, you want to use branch 1, whereas on all other python version, the second branch
         # will be the one that works.
         # However, the zlib VERSIONs as well as the platform check is used to further match the entries in the
         # table in the github issue. This is it ... it was the only way I could make this work everywhere.
2 changes: 1 addition & 1 deletion gitdb/test/db/lib.py
@@ -107,7 +107,7 @@ def _assert_object_writing(self, db):

         # DIRECT STREAM COPY
         # our data hase been written in object format to the StringIO
-        # we pasesd as output stream. No physical database representation
+        # we passed as output stream. No physical database representation
         # was created.
         # Test direct stream copy of object streams, the result must be
         # identical to what we fed in
2 changes: 1 addition & 1 deletion gitdb/test/db/test_pack.py
@@ -80,7 +80,7 @@ def test_writing(self, path):
         # END for each sha to find

         # we should have at least one ambiguous, considering the small sizes
-        # but in our pack, there is no ambigious ...
+        # but in our pack, there is no ambiguous ...
         # assert num_ambiguous

         # non-existing
2 changes: 1 addition & 1 deletion gitdb/test/test_pack.py
@@ -101,7 +101,7 @@ def _assert_pack_file(self, pack, version, size):
                 dstream = DeltaApplyReader.new(streams)
             except ValueError:
                 # ignore these, old git versions use only ref deltas,
-                # which we havent resolved ( as we are without an index )
+                # which we haven't resolved ( as we are without an index )
                 # Also ignore non-delta streams
                 continue
             # END get deltastream
6 changes: 3 additions & 3 deletions gitdb/util.py
@@ -182,7 +182,7 @@ def file_contents_ro(fd, stream=False, allow_mmap=True):
         pass
     # END exception handling

-    # read manully
+    # read manually
     contents = os.read(fd, os.fstat(fd).st_size)
     if stream:
         return _RandomAccessBytesIO(contents)
@@ -248,7 +248,7 @@ class LazyMixin(object):
    def __getattr__(self, attr):
        """
        Whenever an attribute is requested that we do not know, we allow it
-        to be created and set. Next time the same attribute is reqeusted, it is simply
+        to be created and set. Next time the same attribute is requested, it is simply
        returned from our dict/slots. """
        self._set_cache_(attr)
        # will raise in case the cache was not created
@@ -332,7 +332,7 @@ def open(self, write=False, stream=False):

         # open actual file if required
         if self._fd is None:
-            # we could specify exlusive here, as we obtained the lock anyway
+            # we could specify exclusive here, as we obtained the lock anyway
             try:
                 self._fd = os.open(self._filepath, os.O_RDONLY | binary)
             except:
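
The middle hunk touches gitdb's lazy-attribute pattern: __getattr__ fires only for attributes missing from the instance, delegates to _set_cache_ to compute and store them, then hands back the freshly created value. A minimal sketch of the pattern (the subclass and its 'size' attribute are illustrative assumptions, not gitdb code):

import os

class LazyMixin(object):
    """Compute attributes on first access, then serve them from __dict__."""

    def __getattr__(self, attr):
        # only invoked when 'attr' is not found the normal way
        self._set_cache_(attr)
        # will raise AttributeError if _set_cache_ did not create 'attr'
        return object.__getattribute__(self, attr)

    def _set_cache_(self, attr):
        """Override in subclasses to compute and assign 'attr'."""
        pass

# hypothetical subclass: 'size' is stat'ed once, then cached on the instance
class PackFileInfo(LazyMixin):
    def __init__(self, path):
        self.path = path

    def _set_cache_(self, attr):
        if attr == 'size':
            self.size = os.stat(self.path).st_size

After the first access to PackFileInfo('some.pack').size, the value lives in the instance dict, so __getattr__ is never consulted for it again.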