Skip to content

Commit 78e21e7

Browse files
committed
Added auto-doc api reference and fixed plenty of docstrings
1 parent 155b62a commit 78e21e7

File tree

12 files changed

+218
-43
lines changed

12 files changed

+218
-43
lines changed

db/base.py

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,9 @@ def __init__(self, *args, **kwargs):
9090

9191
#{ Edit Interface
9292
def set_ostream(self, stream):
93-
"""Adjusts the stream to which all data should be sent when storing new objects
93+
"""
94+
Adjusts the stream to which all data should be sent when storing new objects
95+
9496
:param stream: if not None, the stream to use, if None the default stream
9597
will be used.
9698
:return: previously installed stream, or None if there was no override
@@ -100,30 +102,36 @@ def set_ostream(self, stream):
100102
return cstream
101103

102104
def ostream(self):
103-
""":return: overridden output stream this instance will write to, or None
105+
"""
106+
:return: overridden output stream this instance will write to, or None
104107
if it will write to the default stream"""
105108
return self._ostream
106109

107110
def store(self, istream):
108-
"""Create a new object in the database
111+
"""
112+
Create a new object in the database
109113
:return: the input istream object with its sha set to its corresponding value
114+
110115
:param istream: IStream compatible instance. If its sha is already set
111116
to a value, the object will just be stored in our database format,
112117
in which case the input stream is expected to be in object format ( header + contents ).
113118
:raise IOError: if data could not be written"""
114119
raise NotImplementedError("To be implemented in subclass")
115120

116121
def store_async(self, reader):
117-
"""Create multiple new objects in the database asynchronously. The method will
122+
"""
123+
Create multiple new objects in the database asynchronously. The method will
118124
return right away, returning an output channel which receives the results as
119125
they are computed.
120126
121127
:return: Channel yielding your IStream which served as input, in any order.
122128
The IStreams sha will be set to the sha it received during the process,
123129
or its error attribute will be set to the exception informing about the error.
130+
124131
:param reader: async.Reader yielding IStream instances.
125132
The same instances will be used in the output channel as were received
126133
in by the Reader.
134+
127135
:note: As some ODB implementations implement this operation atomically, they might
128136
abort the whole operation if one item could not be processed. Hence check how
129137
many items have actually been produced."""
@@ -167,8 +175,10 @@ class CachingDB(object):
167175

168176
#{ Interface
169177
def update_cache(self, force=False):
170-
"""Call this method if the underlying data changed to trigger an update
178+
"""
179+
Call this method if the underlying data changed to trigger an update
171180
of the internal caching structures.
181+
172182
:param force: if True, the update must be performed. Otherwise the implementation
173183
may decide not to perform an update if it thinks nothing has changed.
174184
:return: True if an update was performed as something changed indeed"""

db/pack.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,8 +128,10 @@ def store_async(self, reader):
128128
#{ Interface
129129

130130
def update_cache(self, force=False):
131-
"""Update our cache with the acutally existing packs on disk. Add new ones,
131+
"""
132+
Update our cache with the actually existing packs on disk. Add new ones,
132133
and remove deleted ones. We keep the unchanged ones
134+
133135
:param force: If True, the cache will be updated even though the directory
134136
does not appear to have changed according to its modification timestamp.
135137
:return: True if the packs have been updated so there is new information,

doc/.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
build/

doc/source/api.rst

Lines changed: 113 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,113 @@
1+
.. _api_reference_toplevel:
2+
3+
#############
4+
API Reference
5+
#############
6+
7+
****************
8+
Database.Base
9+
****************
10+
11+
.. automodule:: gitdb.db.base
12+
:members:
13+
:undoc-members:
14+
15+
****************
16+
Database.Git
17+
****************
18+
19+
.. automodule:: gitdb.db.git
20+
:members:
21+
:undoc-members:
22+
23+
****************
24+
Database.Loose
25+
****************
26+
27+
.. automodule:: gitdb.db.loose
28+
:members:
29+
:undoc-members:
30+
31+
****************
32+
Database.Memory
33+
****************
34+
35+
.. automodule:: gitdb.db.mem
36+
:members:
37+
:undoc-members:
38+
39+
****************
40+
Database.Pack
41+
****************
42+
43+
.. automodule:: gitdb.db.pack
44+
:members:
45+
:undoc-members:
46+
47+
******************
48+
Database.Reference
49+
******************
50+
51+
.. automodule:: gitdb.db.ref
52+
:members:
53+
:undoc-members:
54+
55+
************
56+
Base
57+
************
58+
59+
.. automodule:: gitdb.base
60+
:members:
61+
:undoc-members:
62+
63+
************
64+
Functions
65+
************
66+
67+
.. automodule:: gitdb.fun
68+
:members:
69+
:undoc-members:
70+
71+
************
72+
Pack
73+
************
74+
75+
.. automodule:: gitdb.pack
76+
:members:
77+
:undoc-members:
78+
79+
************
80+
Streams
81+
************
82+
83+
.. automodule:: gitdb.stream
84+
:members:
85+
:undoc-members:
86+
87+
************
88+
Types
89+
************
90+
91+
.. automodule:: gitdb.typ
92+
:members:
93+
:undoc-members:
94+
95+
96+
************
97+
Utilities
98+
************
99+
100+
.. automodule:: gitdb.util
101+
:members:
102+
:undoc-members:
103+
104+
105+
106+
107+
108+
109+
110+
111+
112+
113+

doc/source/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
# If extensions (or modules to document with autodoc) are in another directory,
1717
# add these directories to sys.path here. If the directory is relative to the
1818
# documentation root, use os.path.abspath to make it absolute, like shown here.
19-
#sys.path.append(os.path.abspath('.'))
19+
sys.path.append(os.path.abspath('../../../'))
2020

2121
# -- General configuration -----------------------------------------------------
2222

doc/source/index.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,10 @@ Contents:
1010

1111
.. toctree::
1212
:maxdepth: 2
13+
14+
intro
15+
tutorial
16+
api
1317

1418
Indices and tables
1519
==================

doc/source/intro.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
########
2+
Overview
3+
########
4+

doc/source/tutorial.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
2+
########
3+
Tutorial
4+
########

fun.py

Lines changed: 24 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -39,20 +39,22 @@
3939
# used when dealing with larger streams
4040
chunk_size = 1000*mmap.PAGESIZE
4141

42-
__all__ = ('is_loose_object', 'loose_object_header_info', 'object_header_info',
43-
'write_object' )
42+
__all__ = ('is_loose_object', 'loose_object_header_info', 'msb_size', 'pack_object_header_info',
43+
'write_object', 'loose_object_header', 'stream_copy', 'apply_delta_data' )
4444

4545
#{ Routines
4646

4747
def is_loose_object(m):
48-
""":return: True the file contained in memory map m appears to be a loose object.
49-
Only the first two bytes are needed"""
48+
"""
49+
:return: True if the file contained in memory map m appears to be a loose object.
50+
Only the first two bytes are needed"""
5051
b0, b1 = map(ord, m[:2])
5152
word = (b0 << 8) + b1
5253
return b0 == 0x78 and (word % 31) == 0
5354

5455
def loose_object_header_info(m):
55-
""":return: tuple(type_string, uncompressed_size_in_bytes) the type string of the
56+
"""
57+
:return: tuple(type_string, uncompressed_size_in_bytes) the type string of the
5658
object as well as its uncompressed size in bytes.
5759
:param m: memory map from which to read the compressed object data"""
5860
decompress_size = 8192 # is used in cgit as well
@@ -61,9 +63,10 @@ def loose_object_header_info(m):
6163
return type_name, int(size)
6264

6365
def pack_object_header_info(data):
64-
""":return: tuple(type_id, uncompressed_size_in_bytes, byte_offset)
65-
The type_id should be interpreted according to the ``type_id_to_type_map`` map
66-
The byte-offset specifies the start of the actual zlib compressed datastream
66+
"""
67+
:return: tuple(type_id, uncompressed_size_in_bytes, byte_offset)
68+
The type_id should be interpreted according to the ``type_id_to_type_map`` map
69+
The byte-offset specifies the start of the actual zlib compressed datastream
6770
:param m: random-access memory, like a string or memory map"""
6871
c = ord(data[0]) # first byte
6972
i = 1 # next char to read
@@ -87,8 +90,9 @@ def pack_object_header_info(data):
8790
# END handle exceptions
8891

8992
def msb_size(data, offset=0):
90-
""":return: tuple(read_bytes, size) read the msb size from the given random
91-
access data starting at the given byte offset"""
93+
"""
94+
:return: tuple(read_bytes, size) read the msb size from the given random
95+
access data starting at the given byte offset"""
9296
size = 0
9397
i = 0
9498
l = len(data)
@@ -107,12 +111,14 @@ def msb_size(data, offset=0):
107111
return i+offset, size
108112

109113
def loose_object_header(type, size):
110-
""":return: string representing the loose object header, which is immediately
114+
"""
115+
:return: string representing the loose object header, which is immediately
111116
followed by the content stream of size 'size'"""
112117
return "%s %i\0" % (type, size)
113118

114119
def write_object(type, size, read, write, chunk_size=chunk_size):
115-
"""Write the object as identified by type, size and source_stream into the
120+
"""
121+
Write the object as identified by type, size and source_stream into the
116122
target_stream
117123
118124
:param type: type string of the object
@@ -131,8 +137,10 @@ def write_object(type, size, read, write, chunk_size=chunk_size):
131137
return tbw
132138

133139
def stream_copy(read, write, size, chunk_size):
134-
"""Copy a stream up to size bytes using the provided read and write methods,
140+
"""
141+
Copy a stream up to size bytes using the provided read and write methods,
135142
in chunks of chunk_size
143+
136144
:note: it's much like the stream_copy utility, but operates just using methods"""
137145
dbw = 0 # num data bytes written
138146

@@ -156,8 +164,10 @@ def stream_copy(read, write, size, chunk_size):
156164

157165

158166
def apply_delta_data(src_buf, src_buf_size, delta_buf, delta_buf_size, target_file):
159-
"""Apply data from a delta buffer using a source buffer to the target file,
167+
"""
168+
Apply data from a delta buffer using a source buffer to the target file,
160169
which will be written to
170+
161171
:param src_buf: random access data from which the delta was created
162172
:param src_buf_size: size of the source buffer in bytes
163173
:param delta_buf_size: size of the delta buffer in bytes

0 commit comments

Comments
 (0)