
Commit 45e81d5

Committed Jan 31, 2019
read proofing
1 parent 398cb76 commit 45e81d5

File tree

10 files changed: +304 / -96 lines changed
‎book/book-all.adoc

Lines changed: 155 additions & 0 deletions
@@ -0,0 +1,155 @@
+include::_conf/variables.adoc[]
+
+= {doctitle}
+
+// remove numbering from titles, and sub-titles e.g. 1.1
+:sectnums!:
+
+// Copyright © 2018 Adrian Mejia
+include::chapters/colophon.adoc[]
+
+// Abstract and Dedication MUST have a level-0 heading in EPUB and Kindle
+// but level-1 in PDF and HTML
+ifndef::backend-epub3[:leveloffset: +1]
+include::chapters/dedication.adoc[]
+ifndef::backend-epub3[:leveloffset: -1]
+
+// TODO: pending
+include::chapters/preface.adoc[]
+
+include::chapters/cheatsheet.adoc[]
+
+// add sections to chapters
+:sectnums:
+
+//
+// chapters
+//
+
+= Algorithms Analysis
+
+// TODO: pending
+include::chapters/algorithms-analysis-intro.adoc[]
+
+:leveloffset: +1
+
+include::chapters/algorithms-analysis.adoc[]
+
+include::chapters/big-o-examples.adoc[]
+
+:leveloffset: -1
+
+= Linear Data Structures
+
+include::chapters/linear-data-structures-intro.adoc[]
+
+:leveloffset: +1
+
+include::chapters/array.adoc[]
+
+include::chapters/linked-list.adoc[]
+
+include::chapters/stack.adoc[]
+
+include::chapters/queue.adoc[]
+
+:leveloffset: -1
+
+= Non-Linear Data Structures
+
+include::chapters/non-linear-data-structures-intro.adoc[]
+
+:leveloffset: +1
+
+include::chapters/tree.adoc[]
+
+include::chapters/binary-search-tree.adoc[]
+
+include::chapters/map.adoc[]
+
+include::chapters/set.adoc[]
+
+include::chapters/graph.adoc[]
+
+
+:leveloffset: -1
+
+= Advanced Non-Linear Data Structures
+
+// TODO: pending
+include::chapters/non-linear-data-structures-intro-advanced.adoc[]
+
+:leveloffset: +1
+
+// TODO: pending
+include::chapters/avl-tree.adoc[]
+
+// TODO: pending (optional)
+// include::chapters/red-black-tree.adoc[]
+
+// TODO: pending
+include::chapters/heap.adoc[]
+
+// TODO: (optional) pending
+// include::chapters/trie.adoc[]
+
+
+:leveloffset: -1
+
+= Algorithms
+
+// TODO: pending
+include::chapters/algorithms-intro.adoc[]
+
+:leveloffset: +1
+
+// TODO: pending
+include::chapters/sorting-intro.adoc[]
+
+//
+// Slow Sorting
+//
+
+include::chapters/insertion-sort.adoc[]
+
+include::chapters/selection-sort.adoc[]
+
+include::chapters/bubble-sort.adoc[]
+
+//
+// Fast Sorting
+//
+
+include::chapters/merge-sort.adoc[]
+
+include::chapters/quick-sort.adoc[]
+
+// TODO: (optional) pending
+// include::chapters/heap-sort.adoc[]
+
+// TODO: (optional) pending
+// include::chapters/tim-sort.adoc[]
+
+//
+// Searching
+//
+
+// TODO: pending
+include::chapters/graph-search.adoc[]
+
+:leveloffset: -1
+
+//
+// end chapters
+//
+
+include::chapters/epigraph.adoc[]
+
+// TODO: (optional) pending
+// include::chapters/appendix.adoc[]
+
+// TODO: (optional) pending
+ifdef::backend-pdf[]
+include::chapters/index.adoc[]
+endif::[]
+

‎book/book.adoc

Lines changed: 7 additions & 39 deletions
@@ -5,7 +5,7 @@ include::_conf/variables.adoc[]
 // remove numbering from titles, and sub-titles e.g. 1.1
 :sectnums!:
 
-// Copyright © 2018 Adrian Mejia
+// Copyright © 2018 Adrian Mejia (g)
 include::chapters/colophon.adoc[]
 
 // Abstract and Dedication MUST have a level-0 heading in EPUB and Kindle
@@ -14,9 +14,12 @@ ifndef::backend-epub3[:leveloffset: +1]
 include::chapters/dedication.adoc[]
 ifndef::backend-epub3[:leveloffset: -1]
 
-// TODO: pending
+// (g)
 include::chapters/preface.adoc[]
 
+// TODO: review and complete when the rest is completed
+include::chapters/cheatsheet.adoc[]
+
 // add sections to chapters
 :sectnums:
 
@@ -26,11 +29,11 @@ include::chapters/preface.adoc[]
 
 = Algorithms Analysis
 
-// TODO: pending
 include::chapters/algorithms-analysis-intro.adoc[]
 
 :leveloffset: +1
 
+// (g)
 include::chapters/algorithms-analysis.adoc[]
 
 include::chapters/big-o-examples.adoc[]
@@ -53,7 +56,7 @@ include::chapters/queue.adoc[]
 
 :leveloffset: -1
 
-= Non-Linear Data Structures (fundamental)
+= Non-Linear Data Structures
 
 include::chapters/non-linear-data-structures-intro.adoc[]
 
@@ -70,28 +73,6 @@ include::chapters/set.adoc[]
 include::chapters/graph.adoc[]
 
 
-:leveloffset: -1
-
-= Non-Linear Data Structures (advanced)
-
-// TODO: pending
-include::chapters/non-linear-data-structures-intro-advanced.adoc[]
-
-:leveloffset: +1
-
-// TODO: pending
-include::chapters/avl-tree.adoc[]
-
-// TODO: pending
-include::chapters/red-black-tree.adoc[]
-
-// TODO: pending
-include::chapters/heap.adoc[]
-
-// TODO: pending
-include::chapters/trie.adoc[]
-
-
 :leveloffset: -1
 
 = Algorithms
@@ -122,9 +103,6 @@ include::chapters/merge-sort.adoc[]
 
 include::chapters/quick-sort.adoc[]
 
-// TODO: pending
-include::chapters/tim-sort.adoc[]
-
 //
 // Searching
 //
@@ -139,13 +117,3 @@ include::chapters/graph-search.adoc[]
 //
 
 include::chapters/epigraph.adoc[]
-
-// TODO: pending
-include::chapters/appendix.adoc[]
-
-ifdef::backend-pdf[]
-
-// include::chapters/index.adoc[]
-
-endif::[]
-
book/chapters/algorithms-analysis-intro.adoc

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 [partintro]
 --
-Adipisicing occaecat qui amet sint officia ullamco anim proident eu. Et nostrud sint do nisi cupidatat aute ea laborum est Lorem elit est. Sit magna id aute elit tempor cillum consectetur fugiat. Labore aute ea dolore aliquip labore laborum cillum ullamco aliquip laborum exercitation dolore mollit ad.
+In this section, we are going to cover the basics of algorithms analysis. We are also going to discuss eight of the most common runtimes of algorithms.
 --
book/chapters/algorithms-analysis.adoc

Lines changed: 48 additions & 41 deletions
@@ -1,27 +1,27 @@
-= Learning Algorithms Analysis
+= Fundamentals of Algorithms Analysis
 
 Chances are you are reading this book because you want to write better and faster code.
 How can you do that? Can you time how long it takes to run a program? Of course, you can!
 [big]#⏱#
-However, if you run the same program on a smart watch, cellphone or desktop computer it will give you different times.
+However, if you run the same program on a smartwatch, cellphone or desktop computer, it will give you very different times.
 
 image:image3.png[image,width=528,height=137]
 
 Wouldn’t it be great if we could compare algorithms regardless of the hardware where we run them?
 That’s what *time complexity* is for!
 But why stop with the running time?
-We could also compare the memory "used" by different algorithms and we called that *space complexity*.
+We could also compare the memory "used" by different algorithms, and we call that *space complexity*.
 
 .In this chapter you will learn:
-- What’s the best way to measure your code performance.
-- Learn how to use Big O notation to compare algorithms.
-- How to use algorithms analysis to improve your programs speed.
+- What’s the best way to measure your code performance.
+- How to use Big O notation to compare algorithms.
+- How to use algorithms analysis to improve your program’s speed.
 
-Before going deeper, into space and time complexity, let's define what an algorithm is.
+Before going deeper into space and time complexity, let’s cover the basics real quick.
 
 == What are Algorithms?
 
-Algorithms (as you might know) are steps of how to do some task. When you cook, you follow a recipe (or an algorithm) to prepare a dish. Let's say you want to prepare a pizza...
+Algorithms (as you might know) are steps of how to do some task. When you cook, you follow a recipe (or an algorithm) to prepare a dish. Let’s say you want to make a pizza.
 
 .Example of an algorithm
 //[source, js]
@@ -38,15 +38,17 @@ function bakePizza(dough, toppins = []) {
 bakePizza(new Dough, ['ham', 'cheese']);
 ----
 
-If you play a game, you are devising strategies (or an algorithm) to help you win. Likewise, algorithms in computers are a set of instructions used to solve a problem.
+If you play a game, you are devising strategies (or algorithms) to help you win. Likewise, algorithms in computers are a set of instructions used to solve a problem.
 
-TIP: Algorithms are instructions to perform a task.
+TIP: Algorithms are instructions on how to perform a task.
 
 == Comparing Algorithms
 
-There are “good” algorithms and “bad” algorithms. The good ones are fast; the bad ones are slow. Slow algorithms cost more money and make some calculations impossible in our lifespan!
+Not all algorithms are created equal. There are “good” and “bad” algorithms. The good ones are fast; the bad ones are slow. Slow algorithms cost more money to run. Inefficient algorithms could make some calculations impossible in our lifespan!
 
-Just to give you a clearer picture how different algorithms perform as the input size grows.
+Most algorithms are affected by the size of the input. Let’s say you need to arrange numbers in ascending order. Sorting ten digits will naturally take much less time than sorting 2 million of them.
+
+To give you a clearer picture of how different algorithms perform as the input size grows, take a look at the following table.
 
 .Relationship between algorithm input size and time taken to complete
 [cols=",,,,,",options="header",]
@@ -59,9 +61,9 @@ Just to give you a clearer picture how different algorithms perform as the input
 |Find all permutations of a string |4 sec. |> vigintillion years |> centillion years |∞ |∞
 |=============================================================================================
 
-You can really notice the difference between a good algorithm and bad with the sorting array elements examples: `merge-sort` vs `bubble sort`.
+However, if you keep the input size constant, you can notice the difference between an efficient algorithm and a slow one. An excellent sorting algorithm is `mergesort`, for instance, while an inefficient algorithm for large inputs is `bubble sort`.
 Organizing 1 million elements with merge sort takes 20 seconds while bubble sort takes 12 days, ouch!
-The amazing thing is that both programs are measured on the same hardware with exactly the same data!
+The amazing thing is that both programs are measured on the same hardware with the same data!
 
 After completing this book, you are going to *think differently*.
 You will be able to scale your programs while you are designing them.
@@ -72,36 +74,40 @@ Find bottlenecks of existing software and have an "algorithmic toolbox" to switc
 The first step to improve your code performance is to measure it. As somebody said:
 
 [quote, H. J. Harrington]
-Measurement is the first step that leads to control and eventually to improvement. If you can’t measure something, you can’t understand it. If you can’t understand it, you can’t control it. If you can’t control it, you can’t improve it.
+Measurement is the first step that leads to control and eventually to improvement. If you can’t measure something, you can’t understand it. If you can’t understand it, you can’t control it. If you can’t manage it, you can’t improve it.
 
-In this section we are going to learn the basics to measuring our current code performance and compare it with others.
+In this section, we are going to learn the basics of measuring our current code performance and comparing it with other algorithms.
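To make "measure it" concrete: in JavaScript you can get a quick wall-clock reading with `console.time`. A minimal sketch (the `sumAll` function and its input are made up for illustration, not taken from the chapter):

[source, javascript]
----
// Minimal timing sketch; `sumAll` is a stand-in for the code under test.
function sumAll(array) {
  return array.reduce((total, n) => total + n, 0);
}

const input = Array.from({ length: 1e6 }, (_, i) => i); // 0..999,999

console.time('sumAll');    // start a labeled timer
sumAll(input);
console.timeEnd('sumAll'); // prints the elapsed wall-clock time
----

The printed time varies with the hardware it runs on, which is exactly why the chapter moves from measuring seconds to counting operations.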
 
 === Calculating Time Complexity
 
-Time complexity, in computer science, is a function that describes the amount of operations a program will execute given the size of the input `n`.
+Time complexity, in computer science, is a function that describes the number of operations a program will execute given the size of the input `n`.
 
-How do get a function that give us the amount of operations that will executed? Well, we count line by line and mind code inside loops. For instance, we have a function to find the minimum value on an array called `getMin`.
+How do we get a function that gives us the number of operations that will be executed? Well, we count line by line and mind the code inside loops. Let’s do an example to explain this point. For instance, we have a function to find the minimum value on an array, called `getMin`.
 
-.Translating lines of code to approximate number of operations
+.Translating lines of code to an approximate number of operations
 image:image4.png[Operations per line]
 
-Assuming that each line of code is an operation, we get the following that the number of operations given the input size `n` is:
+Assuming that each line of code is an operation, we get the following:
 
 _3n + 3_
 
-That means that if give an array of 3 elements e.g. `getMin([3, 2, 9])`, then it will execute around _3(3)+3 = 12_ operations. Of course, this is not exact. Line 12 is only executed if the condition on line 11 is met. As you might learn in the next section, we want to get the big picture and get rid of smaller terms in order to compare algorithms easier.
+`n` = input size.
+
+That means that if we give an array of 3 elements, e.g. `getMin([3, 2, 9])`, then it will execute around _3(3)+3 = 12_ operations. Of course, this is not true for every case. For instance, line 12 is only executed if the condition on line 11 is met. As you might learn in the next section, we want to get the big picture and get rid of smaller terms to compare algorithms more easily.
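The counted listing itself only appears in image4.png; a `getMin` along these lines (a sketch, assuming the book's version is similar) produces that _3n + 3_ tally:

[source, javascript]
----
// Sketch of getMin with rough per-line operation counts.
function getMin(array) {
  let min;                                     // 1 operation
  for (let i = 0; i < array.length; i++) {     // 1 init, plus ~2n (test + increment)
    if (min === undefined || array[i] < min) { // n comparisons
      min = array[i];                          // runs only when the condition holds
    }
  }
  return min;                                  // 1 operation
}

getMin([3, 2, 9]); //=> 2, after roughly 3(3) + 3 = 12 operations
----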
 
 == Space Complexity
 
-Space complexity is similar to time complexity. However, instead of the count of operations executed it will be the amount of memory used additional to the input.
+Space complexity is similar to time complexity. However, instead of the count of operations executed, it accounts for the amount of memory used in addition to the input.
 
-Calculating the *space complexity* we keep track of the “variables” and memory used. In the `getMin` example, we just create a single variable called `min`. So, the space complexity is 1. If we had to copy values to another array then the space complexity would be `n`.
+For calculating the *space complexity*, we keep track of the “variables” and memory used. In the `getMin` example, we just create a single variable called `min`. So, the space complexity is 1. In other algorithms, if we have to use an auxiliary array, then the space complexity would be `n`.
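As a sketch of the contrast (the `sortCopy` helper here is hypothetical, not from the book):

[source, javascript]
----
// O(1) extra space: a single `min` variable, regardless of the input size.
function getMin(array) {
  let min = array[0];
  for (const value of array) {
    if (value < min) min = value;
  }
  return min;
}

// O(n) extra space: an auxiliary copy that grows with the input.
function sortCopy(array) {
  const copy = [...array]; // allocates n extra slots
  return copy.sort((a, b) => a - b);
}
----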
 
 === Simplifying Complexity with Asymptotic Analysis
 
-Asymptotic analysis is the of functions when their inputs approaches infinity.
+When we are comparing algorithms, we don’t want to have complex expressions. Would you prefer comparing two algorithms as "3n^2^ + 7n" vs. "1000 n + 2000", or as "n^2^" vs. "n"? Well, that’s when asymptotic analysis comes to the rescue.
 
-In the previous example we analyzed `getMin` with an array of size 3, what happen size is 10 or 10k or a million?
+Asymptotic analysis is the study of functions as their inputs approach infinity.
+
+In the previous example, we analyzed `getMin` with an array of size 3. What happens if the size is 10, or 10k, or a million?
 
 .Operations performed by an algorithm with a time complexity of 3n+3
 [cols=",,",options="header",]
@@ -112,31 +118,32 @@ In the previous example we analyzed `getMin` with an array of size 3, what happe
 |1M |3(1M)+3 |3,000,003
 |===========================
 
-As the input size n grows bigger and bigger then the expression _3n+3_ could be represented as _3n_ or even _n_. This might look like a stretch at first, but you will see that what matters the most is the order of the function rather than lesser terms and constants. Actually, there’s a notation called *Big O*, where O refers to the *order of the function*.
+As the input size `n` grows bigger and bigger, the expression _3n + 3_ could be represented as _3n_, or even _n_, without losing too much. Dropping terms might look like a stretch at first, but you will see that what matters the most is the higher-order terms of the function rather than lesser terms and constants. There’s a notation called *Big O*, where O refers to the *order of the function*.
 
-If you have a program which run time is like
+If you have a program whose run time is
 
 _7n^3^ + 3n^2^ + 5_
 
-You can safely say that its run time is _n^3^_ since the others term will become less and less significant as the inputs grows bigger.
+You can safely say that its run time is _n^3^_. The other terms will become less and less significant as the input grows bigger.
 
 === What is Big O Notation anyways?
 
-Big O notation, only cares about the “biggest” terms in the time/space complexity. So, it combines what we learn about time and space complexity, asymptotic analysis and adds worst-case scenario.
+Big O notation only cares about the “biggest” terms in the time/space complexity. So, it combines what we learned about time and space complexity, asymptotic analysis, and adds a worst-case scenario.
 
-.All algorithms have 3 scenarios:
-* Best-case scenario: the most favorable input where the program will take the least amount of operations to complete. E.g. array already sorted for a sorting algorithm.
-* Average-case scenario: the most common the input comes. E.g. array items in random order for a sorting algorithm.
-* Worst-case scenario: the inputs are arranged in such a way that cause the program to take the longest possible to complete the task. E.g. array items in reversed order for a sorting algorithm.
+.All algorithms have three scenarios:
+* Best-case scenario: the most favorable input arrangement, where the program will take the least amount of operations to complete. E.g., an array that is already sorted is beneficial for some sorting algorithms.
+* Average-case scenario: this is the most common case. E.g., array items in random order for a sorting algorithm.
+* Worst-case scenario: the inputs are arranged in such a way that causes the program to take the longest to complete. E.g., array items in reversed order for some sorting algorithms.
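A linear search makes the three scenarios easy to picture (an illustrative sketch; `indexOf` is hand-rolled here rather than the built-in):

[source, javascript]
----
// One algorithm, three scenarios.
function indexOf(array, target) {
  for (let i = 0; i < array.length; i++) {
    if (array[i] === target) return i;
  }
  return -1;
}

const array = [1, 2, 3, 4, 5];
indexOf(array, 1); // best case: match on the first element, 1 step
indexOf(array, 3); // average case: match near the middle, ~n/2 steps
indexOf(array, 9); // worst case: no match, all n elements are visited
----

Big O reports the worst of the three.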
 
 To sum up:
 
-IMPORTANT: Big O only cares about the highest order of the run time function and the worst-case scenario.
-There are many common notations like polynomial, _O(n^2^)_ like we saw in the getMin example; constant O(1) and many more that we are going to explore in the next chapter.
+TIP: Big O only cares about the highest order of the run time function and the worst-case scenario.
+
+WARNING: Don’t drop terms that multiply other terms. _O(n log n)_ is not equivalent to _O(n)_. However, _O(n + log n)_ is.
 
-Again, time complexity is not a direct measure of how long a program takes to execute but rather how many operations it executes in function of the input. However, there’s a relationship between time and operations executed. This changes from hardware to hardware but it gives you an idea.
+There are many common notations, like polynomial, _O(n^2^)_, as we saw in the `getMin` example; constant, O(1); and many more that we are going to explore in the next chapter.
 
-Readers might not know what this O(n!) means…
+Again, time complexity is not a direct measure of how long a program takes to execute, but rather how many operations it performs given the input size. Nevertheless, there’s a relationship between time complexity and clock time, as we can see in the following table.
 
 .How long an algorithm takes to run based on their time complexity and input size
 [cols=",,,,,,",options="header",]
@@ -149,14 +156,14 @@ Readers might not know what this O(n!) means…
 |1M |< 1 sec. |1 second |20 seconds |12 days |∞ |∞
 |===============================================================
 
-This just an illustration since in a different hardware the times will be slightly different.
+This is just an illustration, since on different hardware the times will be slightly different.
 
-NOTE: These times are under the assumption of running on 1 GHz CPU and that it can execute on average one instruction in 1 nanosecond (usually takes more time). Also, bear in mind that each line might be translated into dozens of CPU instructions depending on the programming language. Regardless, bad algorithms still perform badly even in a super computer.
+NOTE: These times are under the assumption of running on a 1 GHz CPU that can execute on average one instruction in 1 nanosecond (it usually takes more time). Also, bear in mind that each line might be translated into dozens of CPU instructions depending on the programming language. Regardless, bad algorithms would perform poorly even on a supercomputer.
 
 == Summary
 
-In this chapter we learned how you can measure your algorithm performance using time complexity. Rather than timing how long you program take to run you can approximate the number of operations it will perform based on the input size.
+In this chapter, we learned how you can measure your algorithm performance using time complexity. Rather than timing how long your program takes to run, you can approximate the number of operations it will perform based on the input size.
 
-We went thought the process of deducting the time complexity from a simple algorithm. We learned about time and space complexity and how they can be translated to Big O notation. Big O refers to the order of the function.
+We learned about time and space complexity and how they can be translated to Big O notation. Big O refers to the *order* of the function.
 
 In the next section, we are going to provide examples of each of the most common time complexities!

‎book/chapters/avl-tree.adoc

Lines changed: 3 additions & 1 deletion
@@ -1,3 +1,5 @@
 = AVL Tree
 
-Ex nulla ex officia reprehenderit aliquip esse. Minim magna commodo fugiat occaecat qui. Esse reprehenderit cupidatat qui et ullamco amet cupidatat sunt pariatur laboris Lorem. Anim non aliquip duis est occaecat minim et eu proident.
+The AVL tree builds on top of a <<Binary Search Tree>> and keeps it balanced on insertions. It prevents the BST worst-case scenario, in which the tree is totally unbalanced to one side (similar to a linked list) and it takes O(n) to find an element instead of O(log n).
+
+
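That worst case is easy to reproduce with a naive BST (a minimal sketch, not the book's implementation): insert keys in sorted order and the "tree" degenerates into a chain.

[source, javascript]
----
// Naive (unbalanced) BST insert; sorted input produces a linked-list shape.
class Node {
  constructor(value) {
    this.value = value;
    this.left = null;
    this.right = null;
  }
}

function insert(root, value) {
  if (!root) return new Node(value);
  if (value < root.value) root.left = insert(root.left, value);
  else root.right = insert(root.right, value);
  return root;
}

let root = null;
for (const value of [1, 2, 3, 4, 5]) root = insert(root, value);
// Every node hangs off `right`: the height is n, so searching is O(n).
// An AVL tree would rotate on insert and keep the height at O(log n).
----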

‎book/chapters/cheatsheet.adoc

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+= Cheatsheet
+
+This section summarizes what we are going to cover in the rest of this book.
+
+== Linear Data Structures
+
+.Time and Space Complexity of Linear Data Structures (Array, LinkedList, Stack & Queues)
+|===
+.2+.^s| Data Structure 2+^s| Searching By 3+^s| Inserting at the 3+^s| Deleting from .2+.^s| Space Complexity
+^|_Index/Key_ ^|_Value_ ^|_beginning_ ^|_middle_ ^|_end_ ^|_beginning_ ^|_middle_ ^|_end_
+| <<Array>> ^|O(1) ^|O(n) ^|O(n) ^|O(n) ^|O(1) ^|O(n) ^|O(n) ^|O(1) ^|O(n)
+| <<Singly Linked List>> ^|O(n) ^|O(n) ^|O(1) ^|O(n) ^|O(1) ^|O(1) ^|O(n) ^|*O(n)* ^|O(n)
+| <<Doubly Linked List>> ^|O(n) ^|O(n) ^|O(1) ^|O(n) ^|O(1) ^|O(1) ^|O(n) ^|*O(1)* ^|O(n)
+| <<Stack>> ^|- ^|- ^|- ^|- ^|O(1) ^|- ^|- ^|O(1) ^|O(n)
+| Queue (w/array) ^|- ^|- ^|- ^|- ^|*O(n)* ^|- ^|- ^|O(1) ^|O(n)
+| <<Queue>> (w/list) ^|- ^|- ^|- ^|- ^|O(1) ^|- ^|- ^|O(1) ^|O(n)
+|===
+
+== Trees and Maps Data Structures
+
+This section covers the time complexity (Big O) of tree, map, and set implementations.
+
+.Time and Space Complexity for Non-Linear Data Structures
+|===
+.2+.^s| Data Structure 2+^s| Searching By .2+^.^s| Insert .2+^.^s| Delete .2+^.^s| Space Complexity
+^|_Index/Key_ ^|_Value_
+| BST (**un**balanced) ^|- ^|O(n) ^|O(n) ^|O(n) ^|O(n)
+| BST (balanced) ^|- ^|O(log n) ^|O(log n) ^|O(log n) ^|O(n)
+| Hash Map (naïve) ^|O(n) ^|O(n) ^|O(n) ^|O(n) ^|O(n)
+| Hash Map (optimized) ^|O(1)* ^|O(n) ^|O(1)* ^|O(1)* ^|O(1)*
+| Tree Map (Red-Black Tree) ^|O(log n) ^|O(n) ^|O(log n) ^|O(log n) ^|O(log n)
+| HashSet ^|- ^|O(n) ^|O(1)* ^|O(1)* ^|O(1)*
+| TreeSet ^|- ^|O(n) ^|O(log n) ^|O(log n) ^|O(log n)
+|===
+{empty}* = Amortized run time. E.g. rehashing might affect run time to *O(n)*.
+
+
+.Time complexity for a Graph data structure
+|===
+.2+.^s| Data Structure 2+^s| Vertices 2+^s| Edges .2+^.^s| Space Complexity
+^|_Add_ ^|_Remove_ ^|_Add_ ^|_Remove_
+| Graph (adj. matrix) ^| O(\|V\|^2^) ^| O(\|V\|^2^) ^|O(1) ^|O(1) ^|O(\|V\|^2^)
+| Graph (adj. list w/array) ^| O(1) ^| O(\|V\| + \|E\|) ^|O(1) ^|O(\|V\| + \|E\|) ^|O(\|V\| + \|E\|)
+| Graph (adj. list w/HashSet) ^| O(1) ^| O(\|V\|) ^|O(1) ^|O(\|V\|) ^|O(\|V\| + \|E\|)
+|===
+
+== Sorting Algorithms
+
+|===
+| Algorithms | Runtime | Space | Stable | In-place | Online | Adaptive | Comments
+| Insertion sort | O(n^2^) | O(1) | Yes | Yes | Yes | Yes |
+| Selection sort | O(n^2^) | O(1) | No | Yes | No | No |
+| Bubble sort | O(n^2^) | O(1) | Yes | Yes | No | Yes |
+| Merge sort | O(n log n) | O(n) | Yes | No | No | No |
+| Quick sort | O(n log n) | O(log n) | No | Yes | No | No |
+// | Tim sort | O(n log n) | O(log n) | Yes | No | No | Yes | Hybrid of merge and insertion sort
+|===
+
+// https://algs4.cs.princeton.edu/cheatsheet/
+// http://bigocheatsheet.com/
+
+// https://en.wikipedia.org/wiki/Timsort (Tim Peters)
+// https://bugs.python.org/file4451/timsort.txt
+// https://www.youtube.com/watch?v=emeME__917E&list=PLMCXHnjXnTntLcLmA5SqhMspm7burHi3m
+
+// https://en.wikipedia.org/wiki/Sorting_algorithm
+// http://sorting.at/
+// https://www.toptal.com/developers/sorting-algorithms
+// https://www.infopulse.com/blog/timsort-sorting-algorithm/

‎book/chapters/colophon.adoc

Lines changed: 2 additions & 2 deletions
@@ -9,8 +9,8 @@ All rights reserved.
 
 For online information and ordering this and other books, please visit https://adrianmejia.com. The publisher offers discounts on this book when ordered in quantity; for more information contact sales@adrianmejia.com.
 
-No part of this publication maybe produced, the store in the retrieval system, or transmitted, in any form or by mean electronic, mechanical, photocopying, or otherwise, without prior written permission of the publisher.
+No part of this publication may be reproduced, stored in a retrieval system, or transmitted, in any form or by any means, electronic, mechanical, photocopying, or otherwise, without the prior written permission of the publisher.
 
-While every precaution has been taking in the preparation of this book, the publisher and author assume no responsibility for errors or omissions, or for damages resulting from the use of the information contained herein.
+While every precaution has been taken in the preparation of this book, the publisher and author assume no responsibility for errors or omissions, or for damages resulting from the use of the information contained herein.
 
 {revremark}, {revdate}.

‎book/chapters/graph.adoc

Lines changed: 6 additions & 6 deletions
@@ -1,7 +1,7 @@
 = Graph
 
-Graphs is one of my favorite data structures.
-They have a lot of cool applications and are used in more places than you can imagine.
+Graphs are one of my favorite data structures.
+They have a lot of cool applications and are used in more places than you can imagine.
 First, let’s start with the basics.
 
 TIP: A graph is a non-linear data structure where a node can have zero or
@@ -18,17 +18,17 @@ include::{codedir}/data-structures/graphs/node.js[tag=constructor]
 }
 ----
 
-As you can see, it’s pretty similar to the Linked List node indeed.
+As you can see, it’s pretty similar to the Linked List node indeed.
 The only difference is that it uses an *array* of the linked nodes instead of just one or two.
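The constructor pulled in by the `include::` above presumably looks something like this sketch (hypothetical; the real code lives in `node.js`):

[source, javascript]
----
// Sketch of a graph node: a value plus an array of adjacent nodes.
class Node {
  constructor(value) {
    this.value = value;
    this.adjacents = []; // linked nodes; an array instead of one or two pointers
  }
}
----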
 
 Another difference between a linked list and a graph is that the linked list
-have a start/first/root node, while the graph doesn’t.
+has a start/first/root node, while the graph doesn’t.
 You can start traversing a graph from anywhere and there might be circular references
 as well. Let’s study these graph properties!
 
 == Graph Properties
 
-The connection between two nodes is called *edge*.
+The connection between two nodes is called an *edge*.
 Also, nodes might be called *vertex*.
 
 .Graph is composed of vertices/nodes and edges
@@ -285,7 +285,7 @@ If we are dealing with a digraph (directed graph), then we just create one edge.
 include::{codedir}/data-structures/graphs/graph.js[tag=addEdge, indent=0]
 ----
 <1> Find or create nodes if they don't exist yet.
-<2> Create edge from source to destination.
+<2> Create an edge from source to destination.
 <3> If it is an undirected graph, create the edge in the other direction.
 
 We can add adjacencies using the `addAdjacent` method from the Node class
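Putting the three callouts together, a minimal `addEdge` could be sketched like this (hypothetical; the actual method is included from `graph.js`):

[source, javascript]
----
// Minimal graph with addEdge following the callouts above (a sketch).
class Graph {
  constructor(edgeDirection = 'undirected') {
    this.nodes = new Map();
    this.edgeDirection = edgeDirection;
  }

  addVertex(value) { // <1> find or create the node if it doesn't exist yet
    if (!this.nodes.has(value)) {
      this.nodes.set(value, { value, adjacents: [] });
    }
    return this.nodes.get(value);
  }

  addEdge(source, destination) {
    const src = this.addVertex(source);
    const dest = this.addVertex(destination);
    src.adjacents.push(dest); // <2> edge from source to destination
    if (this.edgeDirection === 'undirected') {
      dest.adjacents.push(src); // <3> and the reverse edge for undirected graphs
    }
    return [src, dest];
  }
}
----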

‎book/chapters/heap-sort.adoc

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+= Heap Sort
+
+Voluptate consequat magna laborum consectetur fugiat deserunt. Id sit est ullamco magna sint laborum proident. Exercitation cupidatat exercitation excepteur ex pariatur qui qui sint amet consectetur laborum ex mollit dolore.
+
+Et do sunt do labore culpa est eu ut fugiat eiusmod ea excepteur. Irure commodo adipisicing in aute aliquip laborum laboris reprehenderit incididunt in sunt. Cupidatat veniam est culpa ex eu aute voluptate tempor aliqua ullamco sunt et consectetur. Eu laboris mollit culpa consequat. Sunt mollit quis dolor nostrud. In duis mollit do adipisicing veniam do deserunt exercitation Lorem deserunt aliquip. Ea esse reprehenderit incididunt eu deserunt sit nulla sint non eiusmod nisi eu et irure.
+
+Ad commodo anim nulla occaecat non. Aute fugiat laborum ut mollit exercitation aute proident reprehenderit culpa consectetur. Cillum officia laborum proident labore sunt est eiusmod proident. Lorem nostrud ea qui tempor culpa ullamco ipsum. Dolore nulla minim qui incididunt qui sint consectetur quis tempor esse minim. Do id consequat commodo sit officia aliqua officia reprehenderit eiusmod elit do amet.

‎book/chapters/preface.adoc

Lines changed: 6 additions & 6 deletions
@@ -1,14 +1,14 @@
 [preface]
 = Preface
 
-This book is intended for programmers who wants to go deeper into understanding the most common data structures and algorithms.
-Even tough you can use them without knowing how they work, it gives you a tool for analyzing trade-offs. If something is slow you would know what changes to make and how to analyze the code for better performance.
+This book is intended for programmers who want to go deeper into understanding the most common data structures and algorithms.
+Even though you can use them without knowing how they work, it's handy to know when to use one over the other. This book gives you a tool for analyzing trade-offs. When something is slow, you would know how to analyze the code for better performance.
 
-The concepts on this book can be applied to any programming language. However, instead of doing examples on pseudo-code we are going to use JavaScript to implement the examples. JavaScript is the lingua franca of the web and nowdays is growing its usages in the backend, IOT and others.
+The concepts in this book can be applied to any programming language. However, instead of doing examples in pseudo-code, we are going to use JavaScript to implement the examples. JavaScript is the lingua franca of the web, and nowadays its usage is growing in the backend, IoT, and other areas.
 
-The following admonitions are used to hightlight content
+The following admonitions are used to highlight content:
 
-IMPORTANT: Reword important concepts. Good for memorizing, tweeting and sharing.
+IMPORTANT: Rewords essential concepts. Good for memorizing, tweeting, and sharing.
 
 .Side Note with Title
 [NOTE]
@@ -22,7 +22,7 @@ function a(test) {
 ----
 ====
 
-Raw:
+Legend:
 
 NOTE: NOTE
 