diff --git a/.gitignore b/.gitignore
index 36e8309..e833757 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,7 +1,16 @@
+.vscode/
+.venv3?/
+*.sublime-project
+*.sublime-workspace
+.env*
+.DS_Store
concurrency/flags/img/*.gif
concurrency/charfinder/charfinder_index.pickle
+18-asyncio/charfinder/charfinder_index.pickle
metaprog/oscon-schedule/data/schedule?_db
concurrency/wikipedia/fixture/docroot/
+17-futures/countries/flags/
+attic/futures/countries/flags/
# Byte-compiled / optimized / DLL files
__pycache__/
diff --git a/01-data-model/README.rst b/01-data-model/README.rst
new file mode 100644
index 0000000..55e9fca
--- /dev/null
+++ b/01-data-model/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 1 - "The Python Data Model"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/datamodel/frenchdeck.doctest b/01-data-model/frenchdeck.doctest
similarity index 78%
rename from datamodel/frenchdeck.doctest
rename to 01-data-model/frenchdeck.doctest
index a5eca02..a2460c5 100644
--- a/datamodel/frenchdeck.doctest
+++ b/01-data-model/frenchdeck.doctest
@@ -31,24 +31,24 @@ Card(rank='Q', suit='hearts')
2 Card(rank='3', suit='spades')
3 Card(rank='4', suit='spades')
...
->>> def alt_color_rank(card):
+>>> suit_values = dict(spades=3, hearts=2, diamonds=1, clubs=0)
+>>> def spades_high(card):
... rank_value = FrenchDeck.ranks.index(card.rank)
-... suits = 'diamonds clubs hearts spades'.split()
-... return rank_value * len(suits) + suits.index(card.suit)
+... return rank_value * len(suit_values) + suit_values[card.suit]
Rank test:
->>> alt_color_rank(Card('2', 'diamonds'))
+>>> spades_high(Card('2', 'clubs'))
0
->>> alt_color_rank(Card('A', 'spades'))
+>>> spades_high(Card('A', 'spades'))
51
->>> for card in sorted(deck, key=alt_color_rank): # doctest: +ELLIPSIS
+>>> for card in sorted(deck, key=spades_high): # doctest: +ELLIPSIS
... print(card)
-Card(rank='2', suit='diamonds')
Card(rank='2', suit='clubs')
+Card(rank='2', suit='diamonds')
Card(rank='2', suit='hearts')
...
-Card(rank='A', suit='clubs')
+Card(rank='A', suit='diamonds')
Card(rank='A', suit='hearts')
Card(rank='A', suit='spades')
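
The ranking exercised above combines the rank's position with a suit weight; a
minimal standalone sketch (Card and ranks are restated here only for
illustration, mirroring frenchdeck.py)::

    import collections

    Card = collections.namedtuple('Card', ['rank', 'suit'])
    ranks = [str(n) for n in range(2, 11)] + list('JQKA')
    suit_values = dict(spades=3, hearts=2, diamonds=1, clubs=0)

    def spades_high(card):
        # position of the rank (0..12), scaled by the 4 suits, plus the suit weight
        rank_value = ranks.index(card.rank)
        return rank_value * len(suit_values) + suit_values[card.suit]

    assert spades_high(Card('2', 'clubs')) == 0
    assert spades_high(Card('A', 'spades')) == 51
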
diff --git a/datamodel/frenchdeck.py b/01-data-model/frenchdeck.py
similarity index 100%
rename from datamodel/frenchdeck.py
rename to 01-data-model/frenchdeck.py
diff --git a/datamodel/vector2d.py b/01-data-model/vector2d.py
similarity index 100%
rename from datamodel/vector2d.py
rename to 01-data-model/vector2d.py
diff --git a/02-array-seq/README.rst b/02-array-seq/README.rst
new file mode 100644
index 0000000..6eb6357
--- /dev/null
+++ b/02-array-seq/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 2 - "An array of sequences"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/sequences/bisect_demo.py b/02-array-seq/bisect_demo.py
similarity index 100%
rename from sequences/bisect_demo.py
rename to 02-array-seq/bisect_demo.py
diff --git a/sequences/bisect_insort.py b/02-array-seq/bisect_insort.py
similarity index 100%
rename from sequences/bisect_insort.py
rename to 02-array-seq/bisect_insort.py
diff --git a/02-array-seq/listcomp_speed.py b/02-array-seq/listcomp_speed.py
new file mode 100644
index 0000000..0ec8b58
--- /dev/null
+++ b/02-array-seq/listcomp_speed.py
@@ -0,0 +1,18 @@
+import timeit
+
+TIMES = 10000
+
+SETUP = """
+symbols = '$¢£¥€¤'
+def non_ascii(c):
+ return c > 127
+"""
+
+def clock(label, cmd):
+ res = timeit.repeat(cmd, setup=SETUP, number=TIMES)
+ print(label, *('{:.3f}'.format(x) for x in res))
+
+clock('listcomp :', '[ord(s) for s in symbols if ord(s) > 127]')
+clock('listcomp + func :', '[ord(s) for s in symbols if non_ascii(ord(s))]')
+clock('filter + lambda :', 'list(filter(lambda c: c > 127, map(ord, symbols)))')
+clock('filter + func :', 'list(filter(non_ascii, map(ord, symbols)))')
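
Each number printed by clock() above is the total time for TIMES executions of
the expression, not a per-call average; a minimal sketch of the same
measurement done directly with timeit.repeat (names here are illustrative
only)::

    import timeit

    setup = "symbols = '$¢£¥€¤'"
    totals = timeit.repeat('[ord(s) for s in symbols if ord(s) > 127]',
                           setup=setup, repeat=3, number=10000)
    print('best of 3: {:.3f}s for 10000 runs'.format(min(totals)))
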
diff --git a/sequences/metro_lat_long.py b/02-array-seq/metro_lat_long.py
similarity index 100%
rename from sequences/metro_lat_long.py
rename to 02-array-seq/metro_lat_long.py
diff --git a/03-dict-set/README.rst b/03-dict-set/README.rst
new file mode 100644
index 0000000..631db9b
--- /dev/null
+++ b/03-dict-set/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 3 - "Dictionaries and sets"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/dicts/dialcodes.py b/03-dict-set/dialcodes.py
similarity index 100%
rename from dicts/dialcodes.py
rename to 03-dict-set/dialcodes.py
diff --git a/dicts/index.py b/03-dict-set/index.py
similarity index 96%
rename from dicts/index.py
rename to 03-dict-set/index.py
index 3eac1fb..f8641d2 100644
--- a/dicts/index.py
+++ b/03-dict-set/index.py
@@ -8,7 +8,7 @@
import sys
import re
-WORD_RE = re.compile('\w+')
+WORD_RE = re.compile(r'\w+')
index = {}
with open(sys.argv[1], encoding='utf-8') as fp:
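
The raw-string change made here (and in the other regex modules below) does not
alter the pattern; it only keeps the backslash out of Python's string-escape
processing. A minimal illustration::

    import re

    WORD_RE = re.compile(r'\w+')   # raw literal: \w reaches re unchanged
    assert WORD_RE.findall('Fluent Python!') == ['Fluent', 'Python']
    # The non-raw '\w+' happens to work too, because \w is not a recognized
    # string escape, but recent CPython versions warn about such literals.
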
diff --git a/dicts/index0.py b/03-dict-set/index0.py
similarity index 91%
rename from dicts/index0.py
rename to 03-dict-set/index0.py
index a61dca2..b41af0e 100644
--- a/dicts/index0.py
+++ b/03-dict-set/index0.py
@@ -8,7 +8,7 @@
import sys
import re
-WORD_RE = re.compile('\w+')
+WORD_RE = re.compile(r'\w+')
index = {}
with open(sys.argv[1], encoding='utf-8') as fp:
@@ -23,6 +23,6 @@
index[word] = occurrences # <3>
# print in alphabetical order
-for word in sorted(index, key=str.upper):
+for word in sorted(index, key=str.upper): # <4>
print(word, index[word])
# END INDEX0
diff --git a/dicts/index_default.py b/03-dict-set/index_default.py
similarity index 96%
rename from dicts/index_default.py
rename to 03-dict-set/index_default.py
index 521b2d5..8d3ae58 100644
--- a/dicts/index_default.py
+++ b/03-dict-set/index_default.py
@@ -9,7 +9,7 @@
import re
import collections
-WORD_RE = re.compile('\w+')
+WORD_RE = re.compile(r'\w+')
index = collections.defaultdict(list) # <1>
with open(sys.argv[1], encoding='utf-8') as fp:
diff --git a/dicts/strkeydict.py b/03-dict-set/strkeydict.py
similarity index 100%
rename from dicts/strkeydict.py
rename to 03-dict-set/strkeydict.py
diff --git a/dicts/strkeydict0.py b/03-dict-set/strkeydict0.py
similarity index 100%
rename from dicts/strkeydict0.py
rename to 03-dict-set/strkeydict0.py
diff --git a/support/container_perftest.py b/03-dict-set/support/container_perftest.py
similarity index 100%
rename from support/container_perftest.py
rename to 03-dict-set/support/container_perftest.py
diff --git a/support/container_perftest_datagen.py b/03-dict-set/support/container_perftest_datagen.py
similarity index 100%
rename from support/container_perftest_datagen.py
rename to 03-dict-set/support/container_perftest_datagen.py
diff --git a/support/hashdiff.py b/03-dict-set/support/hashdiff.py
similarity index 100%
rename from support/hashdiff.py
rename to 03-dict-set/support/hashdiff.py
diff --git a/dicts/transformdict.py b/03-dict-set/transformdict.py
similarity index 100%
rename from dicts/transformdict.py
rename to 03-dict-set/transformdict.py
diff --git a/04-text-byte/README.rst b/04-text-byte/README.rst
new file mode 100644
index 0000000..60e8138
--- /dev/null
+++ b/04-text-byte/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 4 - "Text and bytes"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/strings-bytes/default_encodings.py b/04-text-byte/default_encodings.py
similarity index 100%
rename from strings-bytes/default_encodings.py
rename to 04-text-byte/default_encodings.py
diff --git a/strings-bytes/normeq.py b/04-text-byte/normeq.py
similarity index 100%
rename from strings-bytes/normeq.py
rename to 04-text-byte/normeq.py
diff --git a/strings-bytes/numerics_demo.py b/04-text-byte/numerics_demo.py
similarity index 100%
rename from strings-bytes/numerics_demo.py
rename to 04-text-byte/numerics_demo.py
diff --git a/strings-bytes/ola.py b/04-text-byte/ola.py
similarity index 100%
rename from strings-bytes/ola.py
rename to 04-text-byte/ola.py
diff --git a/strings-bytes/ramanujan.py b/04-text-byte/ramanujan.py
similarity index 100%
rename from strings-bytes/ramanujan.py
rename to 04-text-byte/ramanujan.py
diff --git a/strings-bytes/sanitize.py b/04-text-byte/sanitize.py
similarity index 100%
rename from strings-bytes/sanitize.py
rename to 04-text-byte/sanitize.py
diff --git a/05-1class-func/README.rst b/05-1class-func/README.rst
new file mode 100644
index 0000000..6825ea9
--- /dev/null
+++ b/05-1class-func/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 5 - "First-class functions"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/functions/bingo.py b/05-1class-func/bingocall.py
similarity index 54%
rename from functions/bingo.py
rename to 05-1class-func/bingocall.py
index 90345d6..90c0c90 100644
--- a/functions/bingo.py
+++ b/05-1class-func/bingocall.py
@@ -2,12 +2,13 @@
# BEGIN BINGO_DEMO
>>> bingo = BingoCage(range(3))
->>> bingo()
-2
+>>> bingo.pick()
+1
>>> bingo()
0
>>> callable(bingo)
True
+
# END BINGO_DEMO
"""
@@ -22,9 +23,13 @@ def __init__(self, items):
self._items = list(items) # <1>
random.shuffle(self._items) # <2>
- def __call__(self):
- if not self._items: # <3>
- raise IndexError('pop from empty BingoCage')
- return self._items.pop()
+ def pick(self): # <3>
+ try:
+ return self._items.pop()
+ except IndexError:
+ raise LookupError('pick from empty BingoCage') # <4>
+
+ def __call__(self): # <5>
+ return self.pick()
# END BINGO
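
A short usage sketch of the interface after this change, assuming the BingoCage
defined above (calling the instance simply delegates to pick)::

    cage = BingoCage(range(3))
    first = cage.pick()    # explicit method call
    second = cage()        # __call__ -> pick()
    third = cage.pick()    # cage is now empty
    # a fourth pick raises LookupError('pick from empty BingoCage')
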
diff --git a/functions/clip.py b/05-1class-func/clip.py
similarity index 100%
rename from functions/clip.py
rename to 05-1class-func/clip.py
diff --git a/functions/clip_annot.py b/05-1class-func/clip_annot.py
similarity index 93%
rename from functions/clip_annot.py
rename to 05-1class-func/clip_annot.py
index d0ad564..1fd7ff1 100644
--- a/functions/clip_annot.py
+++ b/05-1class-func/clip_annot.py
@@ -19,7 +19,7 @@
# BEGIN CLIP_ANNOT
-def clip(text:str, max_len:'int > 0'=80) -> str:
+def clip(text:str, max_len:'int > 0'=80) -> str: # <1>
"""Return text clipped at the last space before or after max_len
"""
end = None
diff --git a/functions/clip_annot_signature.rst b/05-1class-func/clip_annot_signature.rst
similarity index 100%
rename from functions/clip_annot_signature.rst
rename to 05-1class-func/clip_annot_signature.rst
diff --git a/functions/clip_introspection.rst b/05-1class-func/clip_introspection.rst
similarity index 100%
rename from functions/clip_introspection.rst
rename to 05-1class-func/clip_introspection.rst
diff --git a/functions/clip_signature.rst b/05-1class-func/clip_signature.rst
similarity index 100%
rename from functions/clip_signature.rst
rename to 05-1class-func/clip_signature.rst
diff --git a/functions/tagger.py b/05-1class-func/tagger.py
similarity index 100%
rename from functions/tagger.py
rename to 05-1class-func/tagger.py
diff --git a/06-dp-1class-func/README.rst b/06-dp-1class-func/README.rst
new file mode 100644
index 0000000..7ebe7dc
--- /dev/null
+++ b/06-dp-1class-func/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 6 - "Design patterns with first-class functions"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/patterns-func/classic_strategy.py b/06-dp-1class-func/classic_strategy.py
similarity index 98%
rename from patterns-func/classic_strategy.py
rename to 06-dp-1class-func/classic_strategy.py
index 41bd437..0d8f7d6 100644
--- a/patterns-func/classic_strategy.py
+++ b/06-dp-1class-func/classic_strategy.py
@@ -73,7 +73,7 @@ class Promotion(ABC): # the Strategy: an Abstract Base Class
@abstractmethod
def discount(self, order):
- """Return discount as an positive dollar amount"""
+ """Return discount as a positive dollar amount"""
class FidelityPromo(Promotion): # first Concrete Strategy
diff --git a/patterns-func/promotions.py b/06-dp-1class-func/promotions.py
similarity index 100%
rename from patterns-func/promotions.py
rename to 06-dp-1class-func/promotions.py
diff --git a/patterns-func/strategy.py b/06-dp-1class-func/strategy.py
similarity index 100%
rename from patterns-func/strategy.py
rename to 06-dp-1class-func/strategy.py
diff --git a/patterns-func/strategy_best.py b/06-dp-1class-func/strategy_best.py
similarity index 100%
rename from patterns-func/strategy_best.py
rename to 06-dp-1class-func/strategy_best.py
diff --git a/patterns-func/strategy_best2.py b/06-dp-1class-func/strategy_best2.py
similarity index 100%
rename from patterns-func/strategy_best2.py
rename to 06-dp-1class-func/strategy_best2.py
diff --git a/patterns-func/strategy_best3.py b/06-dp-1class-func/strategy_best3.py
similarity index 100%
rename from patterns-func/strategy_best3.py
rename to 06-dp-1class-func/strategy_best3.py
diff --git a/07-closure-deco/README.rst b/07-closure-deco/README.rst
new file mode 100644
index 0000000..0d88de4
--- /dev/null
+++ b/07-closure-deco/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 7 - "Closures and decorators"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/decorators/average.py b/07-closure-deco/average.py
similarity index 100%
rename from decorators/average.py
rename to 07-closure-deco/average.py
diff --git a/decorators/average_oo.py b/07-closure-deco/average_oo.py
similarity index 100%
rename from decorators/average_oo.py
rename to 07-closure-deco/average_oo.py
diff --git a/decorators/clockdeco.py b/07-closure-deco/clockdeco.py
similarity index 100%
rename from decorators/clockdeco.py
rename to 07-closure-deco/clockdeco.py
diff --git a/07-closure-deco/clockdeco_cls.py b/07-closure-deco/clockdeco_cls.py
new file mode 100644
index 0000000..5b19119
--- /dev/null
+++ b/07-closure-deco/clockdeco_cls.py
@@ -0,0 +1,43 @@
+# clockdeco_cls.py
+
+"""
+>>> snooze(.1) # doctest: +ELLIPSIS
+[0.101...s] snooze(0.1) -> None
+>>> clock('{name}: {elapsed}')(time.sleep)(.2) # doctest: +ELLIPSIS
+sleep: 0.20...
+>>> clock('{name}({args}) dt={elapsed:0.3f}s')(time.sleep)(.2) # doctest: +ELLIPSIS
+sleep(0.2) dt=0.20...s
+"""
+
+# BEGIN CLOCKDECO_CLS
+import time
+
+DEFAULT_FMT = '[{elapsed:0.8f}s] {name}({args}) -> {result}'
+
+class clock:
+
+ def __init__(self, fmt=DEFAULT_FMT):
+ self.fmt = fmt
+
+ def __call__(self, func):
+ def clocked(*_args):
+ t0 = time.time()
+ _result = func(*_args)
+ elapsed = time.time() - t0
+ name = func.__name__
+ args = ', '.join(repr(arg) for arg in _args)
+ result = repr(_result)
+ print(self.fmt.format(**locals()))
+ return _result
+ return clocked
+
+if __name__ == '__main__':
+
+ @clock()
+ def snooze(seconds):
+ time.sleep(seconds)
+
+ for i in range(3):
+ snooze(.123)
+
+# END CLOCKDECO_CLS
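
Here clock is a decorator factory implemented as a class: clock(fmt) builds an
instance, and applying that instance to a function returns the clocked wrapper.
A minimal sketch of the equivalence, assuming the clock class and the time
import from this file::

    @clock('{name}({args}) dt={elapsed:0.3f}s')
    def nap(seconds):
        time.sleep(seconds)

    # the decoration above is the same as:
    # nap = clock('{name}({args}) dt={elapsed:0.3f}s')(nap)
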
diff --git a/decorators/clockdeco_demo.py b/07-closure-deco/clockdeco_demo.py
similarity index 100%
rename from decorators/clockdeco_demo.py
rename to 07-closure-deco/clockdeco_demo.py
diff --git a/decorators/clockdeco_param.py b/07-closure-deco/clockdeco_param.py
similarity index 100%
rename from decorators/clockdeco_param.py
rename to 07-closure-deco/clockdeco_param.py
diff --git a/decorators/clockdeco_param_demo1.py b/07-closure-deco/clockdeco_param_demo1.py
similarity index 100%
rename from decorators/clockdeco_param_demo1.py
rename to 07-closure-deco/clockdeco_param_demo1.py
diff --git a/decorators/clockdeco_param_demo2.py b/07-closure-deco/clockdeco_param_demo2.py
similarity index 100%
rename from decorators/clockdeco_param_demo2.py
rename to 07-closure-deco/clockdeco_param_demo2.py
diff --git a/decorators/fibo_demo.py b/07-closure-deco/fibo_demo.py
similarity index 100%
rename from decorators/fibo_demo.py
rename to 07-closure-deco/fibo_demo.py
diff --git a/decorators/fibo_demo_lru.py b/07-closure-deco/fibo_demo_lru.py
similarity index 100%
rename from decorators/fibo_demo_lru.py
rename to 07-closure-deco/fibo_demo_lru.py
diff --git a/decorators/generic.py b/07-closure-deco/generic.py
similarity index 100%
rename from decorators/generic.py
rename to 07-closure-deco/generic.py
diff --git a/closures/global_x_local.rst b/07-closure-deco/global_x_local.rst
similarity index 100%
rename from closures/global_x_local.rst
rename to 07-closure-deco/global_x_local.rst
diff --git a/decorators/registration.py b/07-closure-deco/registration.py
similarity index 100%
rename from decorators/registration.py
rename to 07-closure-deco/registration.py
diff --git a/decorators/registration_abridged.py b/07-closure-deco/registration_abridged.py
similarity index 100%
rename from decorators/registration_abridged.py
rename to 07-closure-deco/registration_abridged.py
diff --git a/decorators/registration_param.py b/07-closure-deco/registration_param.py
similarity index 84%
rename from decorators/registration_param.py
rename to 07-closure-deco/registration_param.py
index ce81975..7cbea22 100644
--- a/decorators/registration_param.py
+++ b/07-closure-deco/registration_param.py
@@ -8,9 +8,8 @@ def decorate(func): # <3>
% (active, func))
if active: # <4>
registry.add(func)
- else: # <5>
- if func in registry:
- registry.remove(func)
+ else:
+ registry.discard(func) # <5>
return func # <6>
return decorate # <7>
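
set.discard removes an element if present and is a no-op otherwise, so the
explicit membership test is no longer needed; a minimal illustration::

    registry = {len}
    registry.discard(len)   # removed
    registry.discard(len)   # silently does nothing
    # registry.remove(len) at this point would raise KeyError
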
diff --git a/decorators/strategy_best4.py b/07-closure-deco/strategy_best4.py
similarity index 100%
rename from decorators/strategy_best4.py
rename to 07-closure-deco/strategy_best4.py
diff --git a/08-obj-ref/README.rst b/08-obj-ref/README.rst
new file mode 100644
index 0000000..deac2fa
--- /dev/null
+++ b/08-obj-ref/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 8 - "Object references, mutability and recycling"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/objects/bus.py b/08-obj-ref/bus.py
similarity index 100%
rename from objects/bus.py
rename to 08-obj-ref/bus.py
diff --git a/objects/cheese.py b/08-obj-ref/cheese.py
similarity index 100%
rename from objects/cheese.py
rename to 08-obj-ref/cheese.py
diff --git a/objects/haunted_bus.py b/08-obj-ref/haunted_bus.py
similarity index 70%
rename from objects/haunted_bus.py
rename to 08-obj-ref/haunted_bus.py
index a463dcd..a38bab1 100644
--- a/objects/haunted_bus.py
+++ b/08-obj-ref/haunted_bus.py
@@ -1,16 +1,16 @@
"""
->>> bus1 = HountedBus(['Alice', 'Bill'])
+>>> bus1 = HauntedBus(['Alice', 'Bill'])
>>> bus1.passengers
['Alice', 'Bill']
>>> bus1.pick('Charlie')
>>> bus1.drop('Alice')
>>> bus1.passengers
['Bill', 'Charlie']
->>> bus2 = HountedBus()
+>>> bus2 = HauntedBus()
>>> bus2.pick('Carrie')
>>> bus2.passengers
['Carrie']
->>> bus3 = HountedBus()
+>>> bus3 = HauntedBus()
>>> bus3.passengers
['Carrie']
>>> bus3.pick('Dave')
@@ -22,18 +22,18 @@
['Bill', 'Charlie']
->>> dir(HountedBus.__init__) # doctest: +ELLIPSIS
+>>> dir(HauntedBus.__init__) # doctest: +ELLIPSIS
['__annotations__', '__call__', ..., '__defaults__', ...]
->>> HountedBus.__init__.__defaults__
+>>> HauntedBus.__init__.__defaults__
(['Carrie', 'Dave'],)
->>> HountedBus.__init__.__defaults__[0] is bus2.passengers
+>>> HauntedBus.__init__.__defaults__[0] is bus2.passengers
True
"""
# BEGIN HAUNTED_BUS_CLASS
-class HountedBus:
- """A bus model hounted by ghost passengers"""
+class HauntedBus:
+ """A bus model haunted by ghost passengers"""
def __init__(self, passengers=[]): # <1>
self.passengers = passengers # <2>
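
HauntedBus is intentionally buggy: the default list is created once, when
__init__ is defined, and is shared by every instance built without arguments,
which is what the doctest above demonstrates. A minimal sketch of the usual
remedy (the same idea used by the book's Bus class in bus.py)::

    class Bus:
        def __init__(self, passengers=None):
            if passengers is None:
                self.passengers = []                  # fresh list per instance
            else:
                self.passengers = list(passengers)    # copy, don't alias
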
diff --git a/objects/twilight_bus.py b/08-obj-ref/twilight_bus.py
similarity index 100%
rename from objects/twilight_bus.py
rename to 08-obj-ref/twilight_bus.py
diff --git a/09-pythonic-obj/README.rst b/09-pythonic-obj/README.rst
new file mode 100644
index 0000000..f5f20ba
--- /dev/null
+++ b/09-pythonic-obj/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 9 - "Pythonic objects"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/classes/mem_test.py b/09-pythonic-obj/mem_test.py
similarity index 100%
rename from classes/mem_test.py
rename to 09-pythonic-obj/mem_test.py
diff --git a/classes/private/Confidential.java b/09-pythonic-obj/private/Confidential.java
similarity index 100%
rename from classes/private/Confidential.java
rename to 09-pythonic-obj/private/Confidential.java
diff --git a/classes/private/Expose.java b/09-pythonic-obj/private/Expose.java
similarity index 100%
rename from classes/private/Expose.java
rename to 09-pythonic-obj/private/Expose.java
diff --git a/classes/private/expose.py b/09-pythonic-obj/private/expose.py
similarity index 100%
rename from classes/private/expose.py
rename to 09-pythonic-obj/private/expose.py
diff --git a/classes/private/leakprivate.py b/09-pythonic-obj/private/leakprivate.py
similarity index 100%
rename from classes/private/leakprivate.py
rename to 09-pythonic-obj/private/leakprivate.py
diff --git a/classes/private/no_respect.py b/09-pythonic-obj/private/no_respect.py
similarity index 100%
rename from classes/private/no_respect.py
rename to 09-pythonic-obj/private/no_respect.py
diff --git a/classes/vector2d_v0.py b/09-pythonic-obj/vector2d_v0.py
similarity index 100%
rename from classes/vector2d_v0.py
rename to 09-pythonic-obj/vector2d_v0.py
diff --git a/classes/vector2d_v1.py b/09-pythonic-obj/vector2d_v1.py
similarity index 100%
rename from classes/vector2d_v1.py
rename to 09-pythonic-obj/vector2d_v1.py
diff --git a/classes/vector2d_v2.py b/09-pythonic-obj/vector2d_v2.py
similarity index 100%
rename from classes/vector2d_v2.py
rename to 09-pythonic-obj/vector2d_v2.py
diff --git a/classes/vector2d_v2_fmt_snippet.py b/09-pythonic-obj/vector2d_v2_fmt_snippet.py
similarity index 100%
rename from classes/vector2d_v2_fmt_snippet.py
rename to 09-pythonic-obj/vector2d_v2_fmt_snippet.py
diff --git a/classes/vector2d_v3.py b/09-pythonic-obj/vector2d_v3.py
similarity index 100%
rename from classes/vector2d_v3.py
rename to 09-pythonic-obj/vector2d_v3.py
diff --git a/classes/vector2d_v3_prophash.py b/09-pythonic-obj/vector2d_v3_prophash.py
similarity index 100%
rename from classes/vector2d_v3_prophash.py
rename to 09-pythonic-obj/vector2d_v3_prophash.py
diff --git a/classes/vector2d_v3_slots.py b/09-pythonic-obj/vector2d_v3_slots.py
similarity index 100%
rename from classes/vector2d_v3_slots.py
rename to 09-pythonic-obj/vector2d_v3_slots.py
diff --git a/10-seq-hacking/README.rst b/10-seq-hacking/README.rst
new file mode 100644
index 0000000..2a905cb
--- /dev/null
+++ b/10-seq-hacking/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 10 - "Sequence hacking, hashing and slicing"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/classes/vector_v1.py b/10-seq-hacking/vector_v1.py
similarity index 100%
rename from classes/vector_v1.py
rename to 10-seq-hacking/vector_v1.py
diff --git a/classes/vector_v2.py b/10-seq-hacking/vector_v2.py
similarity index 92%
rename from classes/vector_v2.py
rename to 10-seq-hacking/vector_v2.py
index fd94ae7..cfa7891 100644
--- a/classes/vector_v2.py
+++ b/10-seq-hacking/vector_v2.py
@@ -112,6 +112,7 @@
from array import array
import reprlib
import math
+import numbers
class Vector:
@@ -150,13 +151,13 @@ def __len__(self):
def __getitem__(self, index):
cls = type(self) # <1>
- if isinstance(index, slice):
- return cls(self._components[index]) # <2>
- elif isinstance(index, int):
- return self._components[index] # <3>
+ if isinstance(index, slice): # <2>
+ return cls(self._components[index]) # <3>
+ elif isinstance(index, numbers.Integral): # <4>
+ return self._components[index] # <5>
else:
msg = '{cls.__name__} indices must be integers'
- raise TypeError(msg.format(cls=cls)) # <4>
+ raise TypeError(msg.format(cls=cls)) # <6>
# END VECTOR_V2
@classmethod
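
Checking against numbers.Integral instead of int makes __getitem__ accept any
type registered as an integral number; a minimal illustration::

    import numbers

    assert isinstance(True, numbers.Integral)      # bool is integral
    assert isinstance(42, numbers.Integral)
    assert not isinstance(4.2, numbers.Integral)   # floats are still rejected
    # NumPy integer scalars (e.g. numpy.uint32) also register as Integral,
    # so they can be used as indices where a plain `int` check would fail.
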
diff --git a/classes/vector_v3.py b/10-seq-hacking/vector_v3.py
similarity index 98%
rename from classes/vector_v3.py
rename to 10-seq-hacking/vector_v3.py
index d248ee8..98e3cff 100644
--- a/classes/vector_v3.py
+++ b/10-seq-hacking/vector_v3.py
@@ -155,6 +155,7 @@
from array import array
import reprlib
import math
+import numbers
class Vector:
@@ -194,7 +195,7 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
msg = '{.__name__} indices must be integers'
diff --git a/classes/vector_v4.py b/10-seq-hacking/vector_v4.py
similarity index 91%
rename from classes/vector_v4.py
rename to 10-seq-hacking/vector_v4.py
index 93aec46..cb6d1e6 100644
--- a/classes/vector_v4.py
+++ b/10-seq-hacking/vector_v4.py
@@ -135,17 +135,22 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+Most hash values of non-integers differ between 32-bit and 64-bit CPython builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
+
"""
from array import array
import reprlib
import math
+import numbers
import functools
import operator
@@ -192,11 +197,11 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
- msg = '{.__name__} indices must be integers'
- raise TypeError(msg.format(cls))
+ msg = '{cls.__name__} indices must be integers'
+ raise TypeError(msg.format(cls=cls))
shortcut_names = 'xyzt'
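
The rewritten hash doctest exists because CPython hashes numbers modulo a prime
that depends on the build: sys.hash_info.modulus is 2**61 - 1 on 64-bit builds
and 2**31 - 1 on 32-bit builds, so hash(3.1) and hash(4.2) differ between them.
A sketch of the same guard outside a doctest (the constants are the ones quoted
in the test)::

    import sys

    expected = 384307168202284039 if sys.maxsize > 2**32 else 357915986
    # hash(Vector([3.1, 4.2])) == expected on the matching CPython build
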
diff --git a/classes/vector_v5.py b/10-seq-hacking/vector_v5.py
similarity index 95%
rename from classes/vector_v5.py
rename to 10-seq-hacking/vector_v5.py
index 13a0c2f..cf327bc 100644
--- a/classes/vector_v5.py
+++ b/10-seq-hacking/vector_v5.py
@@ -136,10 +136,15 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+
+
+Most hash values of non-integers differ between 32-bit and 64-bit CPython builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
Tests of ``format()`` with Cartesian coordinates in 2D::
@@ -187,6 +192,7 @@
from array import array
import reprlib
import math
+import numbers
import functools
import operator
import itertools # <1>
@@ -234,7 +240,7 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
msg = '{.__name__} indices must be integers'
diff --git a/11-iface-abc/README.rst b/11-iface-abc/README.rst
new file mode 100644
index 0000000..b3a649b
--- /dev/null
+++ b/11-iface-abc/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 11 - "Interfaces, protocols and ABCs"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/11-iface-abc/bingo.py b/11-iface-abc/bingo.py
new file mode 100644
index 0000000..56ed288
--- /dev/null
+++ b/11-iface-abc/bingo.py
@@ -0,0 +1,28 @@
+# BEGIN TOMBOLA_BINGO
+
+import random
+
+from tombola import Tombola
+
+
+class BingoCage(Tombola): # <1>
+
+ def __init__(self, items):
+ self._randomizer = random.SystemRandom() # <2>
+ self._items = []
+ self.load(items) # <3>
+
+ def load(self, items):
+ self._items.extend(items)
+ self._randomizer.shuffle(self._items) # <4>
+
+ def pick(self): # <5>
+ try:
+ return self._items.pop()
+ except IndexError:
+ raise LookupError('pick from empty BingoCage')
+
+ def __call__(self): # <7>
+ return self.pick()
+
+# END TOMBOLA_BINGO
diff --git a/interfaces/drum.py b/11-iface-abc/drum.py
similarity index 100%
rename from interfaces/drum.py
rename to 11-iface-abc/drum.py
diff --git a/sequences/frenchdeck2.py b/11-iface-abc/frenchdeck2.py
similarity index 100%
rename from sequences/frenchdeck2.py
rename to 11-iface-abc/frenchdeck2.py
diff --git a/11-iface-abc/lotto.py b/11-iface-abc/lotto.py
new file mode 100644
index 0000000..da8c2de
--- /dev/null
+++ b/11-iface-abc/lotto.py
@@ -0,0 +1,30 @@
+# BEGIN LOTTERY_BLOWER
+
+import random
+
+from tombola import Tombola
+
+
+class LotteryBlower(Tombola):
+
+ def __init__(self, iterable):
+ self._balls = list(iterable) # <1>
+
+ def load(self, iterable):
+ self._balls.extend(iterable)
+
+ def pick(self):
+ try:
+ position = random.randrange(len(self._balls)) # <2>
+ except ValueError:
+ raise LookupError('pick from empty LotteryBlower')
+ return self._balls.pop(position) # <3>
+
+ def loaded(self): # <4>
+ return bool(self._balls)
+
+ def inspect(self): # <5>
+ return tuple(sorted(self._balls))
+
+
+# END LOTTERY_BLOWER
diff --git a/11-iface-abc/tombola.py b/11-iface-abc/tombola.py
new file mode 100644
index 0000000..5ed0f85
--- /dev/null
+++ b/11-iface-abc/tombola.py
@@ -0,0 +1,35 @@
+# BEGIN TOMBOLA_ABC
+
+import abc
+
+class Tombola(abc.ABC): # <1>
+
+ @abc.abstractmethod
+ def load(self, iterable): # <2>
+ """Add items from an iterable."""
+
+ @abc.abstractmethod
+ def pick(self): # <3>
+ """Remove item at random, returning it.
+
+ This method should raise `LookupError` when the instance is empty.
+ """
+
+ def loaded(self): # <4>
+ """Return `True` if there's at least 1 item, `False` otherwise."""
+ return bool(self.inspect()) # <5>
+
+
+ def inspect(self):
+ """Return a sorted tuple with the items currently inside."""
+ items = []
+ while True: # <6>
+ try:
+ items.append(self.pick())
+ except LookupError:
+ break
+ self.load(items) # <7>
+ return tuple(sorted(items))
+
+
+# END TOMBOLA_ABC
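
Only load and pick are abstract; loaded and inspect are concrete methods built
on top of them, so any subclass gets them for free. A minimal concrete subclass
sketch (illustration only; the chapter's working implementations include
bingo.py, lotto.py and tombolist.py)::

    import random

    class QuickTombola(Tombola):

        def __init__(self, iterable=()):
            self._items = list(iterable)

        def load(self, iterable):
            self._items.extend(iterable)

        def pick(self):
            try:
                position = random.randrange(len(self._items))
            except ValueError:    # randrange(0) -> empty tombola
                raise LookupError('pick from empty QuickTombola')
            return self._items.pop(position)

    t = QuickTombola(range(3))
    print(t.loaded(), t.inspect())    # True (0, 1, 2)
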
diff --git a/interfaces/tombola_runner.py b/11-iface-abc/tombola_runner.py
similarity index 100%
rename from interfaces/tombola_runner.py
rename to 11-iface-abc/tombola_runner.py
diff --git a/interfaces/tombola_subhook.py b/11-iface-abc/tombola_subhook.py
similarity index 100%
rename from interfaces/tombola_subhook.py
rename to 11-iface-abc/tombola_subhook.py
diff --git a/interfaces/tombola_tests.rst b/11-iface-abc/tombola_tests.rst
similarity index 77%
rename from interfaces/tombola_tests.rst
rename to 11-iface-abc/tombola_tests.rst
index 30f0164..1489903 100644
--- a/interfaces/tombola_tests.rst
+++ b/11-iface-abc/tombola_tests.rst
@@ -11,6 +11,8 @@ Create and load instance from iterable::
>>> globe = ConcreteTombola(balls)
>>> globe.loaded()
True
+ >>> globe.inspect()
+ (0, 1, 2)
Pick and collect balls::
@@ -50,12 +52,12 @@ thrown when the device is empty::
OK
-Load and pick 100 balls to verify that they are all come out::
+Load and pick 100 balls to verify that they all come out::
>>> balls = list(range(100))
>>> globe = ConcreteTombola(balls)
>>> picks = []
- >>> while globe.loaded():
+ >>> while globe.inspect():
... picks.append(globe.pick())
>>> len(picks) == len(balls)
True
@@ -63,7 +65,7 @@ Load and pick 100 balls to verify that they are all come out::
True
-Check that the order has changed is not simply reversed either::
+Check that the order has changed and is not simply reversed::
>>> picks != balls
True
@@ -71,9 +73,9 @@ Check that the order has changed is not simply reversed either::
True
Note: the previous 2 tests have a *very* small chance of failing
-even if the implementation is OK. The probability of the 100
-balls coming out, by chance, in the order they were loaded is
-1/100!, or approximately 1.07e-158. It's much easier to win the
+even if the implementation is OK. The probability of the 100
+balls coming out, by chance, in the order they were inspected is
+1/100!, or approximately 1.07e-158. It's much easier to win the
Lotto or to become a billionaire working as a programmer.
THE END
diff --git a/interfaces/tombolist.py b/11-iface-abc/tombolist.py
similarity index 55%
rename from interfaces/tombolist.py
rename to 11-iface-abc/tombolist.py
index ddf162c..b3ca2a6 100644
--- a/interfaces/tombolist.py
+++ b/11-iface-abc/tombolist.py
@@ -8,12 +8,16 @@ class TomboList(list): # <2>
def pick(self):
if self: # <3>
position = randrange(len(self))
- return super().pop(position) # <4>
+ return self.pop(position) # <4>
else:
raise LookupError('pop from empty TomboList')
- def load(self, iterable): self.extend(iterable) # <5>
+ load = list.extend # <5>
- def loaded(self): return bool(self) # <6>
+ def loaded(self):
+ return bool(self) # <6>
-# Tombola.register(TomboList) # <- Python 3.2 or earlier
+ def inspect(self):
+ return tuple(sorted(self))
+
+# Tombola.register(TomboList) # <7>
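
TomboList never inherits from Tombola; it only counts as a *virtual* subclass
once it is registered (registration is typically applied with the
@Tombola.register class decorator near the top of the file, which this hunk
does not show). A minimal sketch of what registration changes, assuming Tombola
and TomboList are imported from the modules above::

    Tombola.register(TomboList)                      # explicit form of <7>
    print(issubclass(TomboList, Tombola))            # True
    print(isinstance(TomboList(range(3)), Tombola))  # True
    print(Tombola in TomboList.__mro__)              # False: no real inheritance
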
diff --git a/12-inheritance/README.rst b/12-inheritance/README.rst
new file mode 100644
index 0000000..1b5f918
--- /dev/null
+++ b/12-inheritance/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 12 - "Inheritance: for good or for worse"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/interfaces/diamond.py b/12-inheritance/diamond.py
similarity index 65%
rename from interfaces/diamond.py
rename to 12-inheritance/diamond.py
index 2d63877..5aaac18 100644
--- a/interfaces/diamond.py
+++ b/12-inheritance/diamond.py
@@ -14,6 +14,14 @@ def pong(self):
class D(B, C):
+
+ def ping(self):
+ super().ping()
+ print('post-ping:', self)
+
def pingpong(self):
+ self.ping()
super().ping()
+ self.pong()
super().pong()
+ C.pong(self)
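
The added methods make the traversal of the diamond explicit; a sketch of what
each call in pingpong resolves to, assuming A, B(A) and C(A) from earlier in
diamond.py, where A defines ping and B and C each define pong::

    d = D()
    print(D.__mro__)    # (D, B, C, A, object)
    d.pingpong()
    # self.ping()     -> D.ping, which itself calls super().ping() (A.ping)
    # super().ping()  -> A.ping (B and C define no ping)
    # self.pong()     -> B.pong, the first pong found along the MRO
    # super().pong()  -> B.pong again, for the same reason
    # C.pong(self)    -> C.pong called directly, bypassing the MRO
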
diff --git a/13-op-overloading/README.rst b/13-op-overloading/README.rst
new file mode 100644
index 0000000..f872435
--- /dev/null
+++ b/13-op-overloading/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 13 - "Operator overloading: doing it right"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/13-op-overloading/bingo.py b/13-op-overloading/bingo.py
new file mode 100644
index 0000000..56ed288
--- /dev/null
+++ b/13-op-overloading/bingo.py
@@ -0,0 +1,28 @@
+# BEGIN TOMBOLA_BINGO
+
+import random
+
+from tombola import Tombola
+
+
+class BingoCage(Tombola): # <1>
+
+ def __init__(self, items):
+ self._randomizer = random.SystemRandom() # <2>
+ self._items = []
+ self.load(items) # <3>
+
+ def load(self, items):
+ self._items.extend(items)
+ self._randomizer.shuffle(self._items) # <4>
+
+ def pick(self): # <5>
+ try:
+ return self._items.pop()
+ except IndexError:
+ raise LookupError('pick from empty BingoCage')
+
+ def __call__(self): # <7>
+ return self.pick()
+
+# END TOMBOLA_BINGO
diff --git a/13-op-overloading/bingoaddable.py b/13-op-overloading/bingoaddable.py
new file mode 100644
index 0000000..976aed9
--- /dev/null
+++ b/13-op-overloading/bingoaddable.py
@@ -0,0 +1,86 @@
+"""
+======================
+AddableBingoCage tests
+======================
+
+
+Tests for __add__:
+
+# BEGIN ADDABLE_BINGO_ADD_DEMO
+
+ >>> vowels = 'AEIOU'
+ >>> globe = AddableBingoCage(vowels) # <1>
+ >>> globe.inspect()
+ ('A', 'E', 'I', 'O', 'U')
+ >>> globe.pick() in vowels # <2>
+ True
+ >>> len(globe.inspect()) # <3>
+ 4
+ >>> globe2 = AddableBingoCage('XYZ') # <4>
+ >>> globe3 = globe + globe2
+ >>> len(globe3.inspect()) # <5>
+ 7
+ >>> void = globe + [10, 20] # <6>
+ Traceback (most recent call last):
+ ...
+ TypeError: unsupported operand type(s) for +: 'AddableBingoCage' and 'list'
+
+
+# END ADDABLE_BINGO_ADD_DEMO
+
+Tests for __iadd__:
+
+# BEGIN ADDABLE_BINGO_IADD_DEMO
+
+ >>> globe_orig = globe # <1>
+ >>> len(globe.inspect()) # <2>
+ 4
+ >>> globe += globe2 # <3>
+ >>> len(globe.inspect())
+ 7
+ >>> globe += ['M', 'N'] # <4>
+ >>> len(globe.inspect())
+ 9
+ >>> globe is globe_orig # <5>
+ True
+ >>> globe += 1 # <6>
+ Traceback (most recent call last):
+ ...
+ TypeError: right operand in += must be 'AddableBingoCage' or an iterable
+
+# END ADDABLE_BINGO_IADD_DEMO
+
+"""
+
+# BEGIN ADDABLE_BINGO
+import itertools # <1>
+
+from tombola import Tombola
+from bingo import BingoCage
+
+
+class AddableBingoCage(BingoCage): # <2>
+
+ def __add__(self, other):
+ if isinstance(other, Tombola): # <3>
+ return AddableBingoCage(self.inspect() + other.inspect())
+ else:
+ return NotImplemented
+
+ def __iadd__(self, other):
+ if isinstance(other, Tombola):
+ other_iterable = other.inspect() # <4>
+ else:
+ try:
+ other_iterable = iter(other) # <5>
+ except TypeError: # <6>
+ self_cls = type(self).__name__
+ msg = "right operand in += must be {!r} or an iterable"
+ raise TypeError(msg.format(self_cls))
+ self.load(other_iterable) # <7>
+ return self # <8>
+
+
+
+
+# END ADDABLE_BINGO
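
Returning NotImplemented (instead of raising) from __add__ is what produces the
standard TypeError shown in the doctest: AddableBingoCage declines, the list on
the right has no __radd__ to fall back on, and the interpreter raises. A toy
sketch of the same protocol (names here are illustrative only)::

    class Three:
        def __add__(self, other):
            if other == 3:
                return 6
            return NotImplemented   # let the other operand's __radd__ try

    Three() + 3       # 6
    # Three() + 'x'   # TypeError: unsupported operand type(s) for +: ...
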
diff --git a/13-op-overloading/tombola.py b/13-op-overloading/tombola.py
new file mode 100644
index 0000000..5ed0f85
--- /dev/null
+++ b/13-op-overloading/tombola.py
@@ -0,0 +1,35 @@
+# BEGIN TOMBOLA_ABC
+
+import abc
+
+class Tombola(abc.ABC): # <1>
+
+ @abc.abstractmethod
+ def load(self, iterable): # <2>
+ """Add items from an iterable."""
+
+ @abc.abstractmethod
+ def pick(self): # <3>
+ """Remove item at random, returning it.
+
+ This method should raise `LookupError` when the instance is empty.
+ """
+
+ def loaded(self): # <4>
+ """Return `True` if there's at least 1 item, `False` otherwise."""
+ return bool(self.inspect()) # <5>
+
+
+ def inspect(self):
+ """Return a sorted tuple with the items currently inside."""
+ items = []
+ while True: # <6>
+ try:
+ items.append(self.pick())
+ except LookupError:
+ break
+ self.load(items) # <7>
+ return tuple(sorted(items))
+
+
+# END TOMBOLA_ABC
diff --git a/13-op-overloading/unary_plus_decimal.py b/13-op-overloading/unary_plus_decimal.py
new file mode 100644
index 0000000..46e481f
--- /dev/null
+++ b/13-op-overloading/unary_plus_decimal.py
@@ -0,0 +1,35 @@
+"""
+# BEGIN UNARY_PLUS_DECIMAL
+
+>>> import decimal
+>>> ctx = decimal.getcontext() # <1>
+>>> ctx.prec = 40 # <2>
+>>> one_third = decimal.Decimal('1') / decimal.Decimal('3') # <3>
+>>> one_third # <4>
+Decimal('0.3333333333333333333333333333333333333333')
+>>> one_third == +one_third # <5>
+True
+>>> ctx.prec = 28 # <6>
+>>> one_third == +one_third # <7>
+False
+>>> +one_third # <8>
+Decimal('0.3333333333333333333333333333')
+
+# END UNARY_PLUS_DECIMAL
+
+"""
+
+import decimal
+
+if __name__ == '__main__':
+
+ with decimal.localcontext() as ctx:
+ ctx.prec = 40
+ print('precision:', ctx.prec)
+ one_third = decimal.Decimal('1') / decimal.Decimal('3')
+ print(' one_third:', one_third)
+ print(' +one_third:', +one_third)
+
+ print('precision:', decimal.getcontext().prec)
+ print(' one_third:', one_third)
+ print(' +one_third:', +one_third)
diff --git a/13-op-overloading/vector2d_v3.py b/13-op-overloading/vector2d_v3.py
new file mode 100644
index 0000000..5812dcf
--- /dev/null
+++ b/13-op-overloading/vector2d_v3.py
@@ -0,0 +1,151 @@
+"""
+A 2-dimensional vector class
+
+ >>> v1 = Vector2d(3, 4)
+ >>> print(v1.x, v1.y)
+ 3.0 4.0
+ >>> x, y = v1
+ >>> x, y
+ (3.0, 4.0)
+ >>> v1
+ Vector2d(3.0, 4.0)
+ >>> v1_clone = eval(repr(v1))
+ >>> v1 == v1_clone
+ True
+ >>> print(v1)
+ (3.0, 4.0)
+ >>> octets = bytes(v1)
+ >>> octets
+ b'd\\x00\\x00\\x00\\x00\\x00\\x00\\x08@\\x00\\x00\\x00\\x00\\x00\\x00\\x10@'
+ >>> abs(v1)
+ 5.0
+ >>> bool(v1), bool(Vector2d(0, 0))
+ (True, False)
+
+
+Test of ``.frombytes()`` class method:
+
+ >>> v1_clone = Vector2d.frombytes(bytes(v1))
+ >>> v1_clone
+ Vector2d(3.0, 4.0)
+ >>> v1 == v1_clone
+ True
+
+
+Tests of ``format()`` with Cartesian coordinates:
+
+ >>> format(v1)
+ '(3.0, 4.0)'
+ >>> format(v1, '.2f')
+ '(3.00, 4.00)'
+ >>> format(v1, '.3e')
+ '(3.000e+00, 4.000e+00)'
+
+
+Tests of the ``angle`` method::
+
+ >>> Vector2d(0, 0).angle()
+ 0.0
+ >>> Vector2d(1, 0).angle()
+ 0.0
+ >>> epsilon = 10**-8
+ >>> abs(Vector2d(0, 1).angle() - math.pi/2) < epsilon
+ True
+ >>> abs(Vector2d(1, 1).angle() - math.pi/4) < epsilon
+ True
+
+
+Tests of ``format()`` with polar coordinates:
+
+ >>> format(Vector2d(1, 1), 'p') # doctest:+ELLIPSIS
+ '<1.414213..., 0.785398...>'
+ >>> format(Vector2d(1, 1), '.3ep')
+ '<1.414e+00, 7.854e-01>'
+ >>> format(Vector2d(1, 1), '0.5fp')
+ '<1.41421, 0.78540>'
+
+
+Tests of `x` and `y` read-only properties:
+
+ >>> v1.x, v1.y
+ (3.0, 4.0)
+ >>> v1.x = 123
+ Traceback (most recent call last):
+ ...
+ AttributeError: can't set attribute
+
+
+Tests of hashing:
+
+ >>> v1 = Vector2d(3, 4)
+ >>> v2 = Vector2d(3.1, 4.2)
+ >>> hash(v1), hash(v2)
+ (7, 384307168202284039)
+ >>> len(set([v1, v2]))
+ 2
+
+"""
+
+from array import array
+import math
+
+class Vector2d:
+ typecode = 'd'
+
+ def __init__(self, x, y):
+ self.__x = float(x)
+ self.__y = float(y)
+
+ @property
+ def x(self):
+ return self.__x
+
+ @property
+ def y(self):
+ return self.__y
+
+ def __iter__(self):
+ return (i for i in (self.x, self.y))
+
+ def __repr__(self):
+ class_name = type(self).__name__
+ return '{}({!r}, {!r})'.format(class_name, *self)
+
+ def __str__(self):
+ return str(tuple(self))
+
+ def __bytes__(self):
+ return (bytes([ord(self.typecode)]) +
+ bytes(array(self.typecode, self)))
+
+ def __eq__(self, other):
+ return tuple(self) == tuple(other)
+
+ def __hash__(self):
+ return hash(self.x) ^ hash(self.y)
+
+ def __abs__(self):
+ return math.hypot(self.x, self.y)
+
+ def __bool__(self):
+ return bool(abs(self))
+
+ def angle(self):
+ return math.atan2(self.y, self.x)
+
+ def __format__(self, fmt_spec=''):
+ if fmt_spec.endswith('p'):
+ fmt_spec = fmt_spec[:-1]
+ coords = (abs(self), self.angle())
+ outer_fmt = '<{}, {}>'
+ else:
+ coords = self
+ outer_fmt = '({}, {})'
+ components = (format(c, fmt_spec) for c in coords)
+ return outer_fmt.format(*components)
+
+ @classmethod
+ def frombytes(cls, octets):
+ typecode = chr(octets[0])
+ memv = memoryview(octets[1:]).cast(typecode)
+ return cls(*memv)
diff --git a/classes/vector_py3_5.py b/13-op-overloading/vector_py3_5.py
similarity index 97%
rename from classes/vector_py3_5.py
rename to 13-op-overloading/vector_py3_5.py
index 675be99..32a59fb 100644
--- a/classes/vector_py3_5.py
+++ b/13-op-overloading/vector_py3_5.py
@@ -137,10 +137,15 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+
+
+Most hash values of non-integers differ between 32-bit and 64-bit Python builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
Tests of ``format()`` with Cartesian coordinates in 2D::
diff --git a/classes/vector_v6.py b/13-op-overloading/vector_v6.py
similarity index 92%
rename from classes/vector_v6.py
rename to 13-op-overloading/vector_v6.py
index 2942f92..ff7599f 100644
--- a/classes/vector_v6.py
+++ b/13-op-overloading/vector_v6.py
@@ -135,10 +135,15 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+
+
+Most hash values of non-integers differ between 32-bit and 64-bit Python builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
Tests of ``format()`` with Cartesian coordinates in 2D::
@@ -183,6 +188,17 @@
'<1.00000, 1.57080, 0.00000, 0.00000>'
+Unary operator tests::
+
+ >>> v1 = Vector([3, 4])
+ >>> abs(v1)
+ 5.0
+ >>> -v1
+ Vector([-3.0, -4.0])
+ >>> +v1
+ Vector([3.0, 4.0])
+
+
Basic tests of operator ``+``::
>>> v1 = Vector([3, 4, 5])
@@ -231,6 +247,7 @@
from array import array
import reprlib
import math
+import numbers
import functools
import operator
import itertools
@@ -265,9 +282,17 @@ def __hash__(self):
hashes = (hash(x) for x in self)
return functools.reduce(operator.xor, hashes, 0)
+# BEGIN VECTOR_V6_UNARY
def __abs__(self):
return math.sqrt(sum(x * x for x in self))
+ def __neg__(self):
+ return Vector(-x for x in self) # <1>
+
+ def __pos__(self):
+ return Vector(self) # <2>
+# END VECTOR_V6_UNARY
+
def __bool__(self):
return bool(abs(self))
@@ -278,7 +303,7 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
msg = '{.__name__} indices must be integers'
@@ -324,7 +349,7 @@ def frombytes(cls, octets):
memv = memoryview(octets[1:]).cast(typecode)
return cls(memv)
-# BEGIN VECTOR_V6
+# BEGIN VECTOR_V6_ADD
def __add__(self, other):
try:
pairs = itertools.zip_longest(self, other, fillvalue=0.0)
@@ -334,4 +359,4 @@ def __add__(self, other):
def __radd__(self, other):
return self + other
-# END VECTOR_V6
+# END VECTOR_V6_ADD
diff --git a/classes/vector_v7.py b/13-op-overloading/vector_v7.py
similarity index 94%
rename from classes/vector_v7.py
rename to 13-op-overloading/vector_v7.py
index 73432ba..a550318 100644
--- a/classes/vector_v7.py
+++ b/13-op-overloading/vector_v7.py
@@ -135,10 +135,15 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+
+
+Most hash values of non-integers differ between 32-bit and 64-bit Python builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
Tests of ``format()`` with Cartesian coordinates in 2D::
@@ -183,6 +188,17 @@
'<1.00000, 1.57080, 0.00000, 0.00000>'
+Unary operator tests::
+
+ >>> v1 = Vector([3, 4])
+ >>> abs(v1)
+ 5.0
+ >>> -v1
+ Vector([-3.0, -4.0])
+ >>> +v1
+ Vector([3.0, 4.0])
+
+
Basic tests of operator ``+``::
>>> v1 = Vector([3, 4, 5])
@@ -258,10 +274,10 @@
from array import array
import reprlib
import math
+import numbers
import functools
import operator
import itertools
-import numbers
class Vector:
@@ -296,6 +312,12 @@ def __hash__(self):
def __abs__(self):
return math.sqrt(sum(x * x for x in self))
+ def __neg__(self):
+ return Vector(-x for x in self)
+
+ def __pos__(self):
+ return Vector(self)
+
def __bool__(self):
return bool(abs(self))
@@ -306,7 +328,7 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
msg = '{.__name__} indices must be integers'
diff --git a/classes/vector_v8.py b/13-op-overloading/vector_v8.py
similarity index 94%
rename from classes/vector_v8.py
rename to 13-op-overloading/vector_v8.py
index e743b33..bcf4fba 100644
--- a/classes/vector_v8.py
+++ b/13-op-overloading/vector_v8.py
@@ -135,10 +135,15 @@
>>> v2 = Vector([3.1, 4.2])
>>> v3 = Vector([3, 4, 5])
>>> v6 = Vector(range(6))
- >>> hash(v1), hash(v2), hash(v3), hash(v6)
- (7, 384307168202284039, 2, 1)
- >>> len(set([v1, v2, v3, v6]))
- 4
+ >>> hash(v1), hash(v3), hash(v6)
+ (7, 2, 1)
+
+
+Most hash values of non-integers differ between 32-bit and 64-bit Python builds::
+
+ >>> import sys
+ >>> hash(v2) == (384307168202284039 if sys.maxsize > 2**32 else 357915986)
+ True
Tests of ``format()`` with Cartesian coordinates in 2D::
@@ -183,6 +188,17 @@
'<1.00000, 1.57080, 0.00000, 0.00000>'
+Unary operator tests::
+
+ >>> v1 = Vector([3, 4])
+ >>> abs(v1)
+ 5.0
+ >>> -v1
+ Vector([-3.0, -4.0])
+ >>> +v1
+ Vector([3.0, 4.0])
+
+
Basic tests of operator ``+``::
>>> v1 = Vector([3, 4, 5])
@@ -283,10 +299,10 @@
from array import array
import reprlib
import math
+import numbers
import functools
import operator
import itertools
-import numbers
class Vector:
@@ -326,6 +342,12 @@ def __hash__(self):
def __abs__(self):
return math.sqrt(sum(x * x for x in self))
+ def __neg__(self):
+ return Vector(-x for x in self)
+
+ def __pos__(self):
+ return Vector(self)
+
def __bool__(self):
return bool(abs(self))
@@ -336,7 +358,7 @@ def __getitem__(self, index):
cls = type(self)
if isinstance(index, slice):
return cls(self._components[index])
- elif isinstance(index, int):
+ elif isinstance(index, numbers.Integral):
return self._components[index]
else:
msg = '{.__name__} indices must be integers'
diff --git a/14-it-generator/README.rst b/14-it-generator/README.rst
new file mode 100644
index 0000000..26eb7b2
--- /dev/null
+++ b/14-it-generator/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 14 - "Iterables, iterators and generators"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/iterables/aritprog.rst b/14-it-generator/aritprog.rst
similarity index 100%
rename from iterables/aritprog.rst
rename to 14-it-generator/aritprog.rst
diff --git a/iterables/aritprog_float_error.py b/14-it-generator/aritprog_float_error.py
similarity index 100%
rename from iterables/aritprog_float_error.py
rename to 14-it-generator/aritprog_float_error.py
diff --git a/iterables/aritprog_runner.py b/14-it-generator/aritprog_runner.py
similarity index 100%
rename from iterables/aritprog_runner.py
rename to 14-it-generator/aritprog_runner.py
diff --git a/iterables/aritprog_v0.py b/14-it-generator/aritprog_v0.py
similarity index 100%
rename from iterables/aritprog_v0.py
rename to 14-it-generator/aritprog_v0.py
diff --git a/iterables/aritprog_v1.py b/14-it-generator/aritprog_v1.py
similarity index 100%
rename from iterables/aritprog_v1.py
rename to 14-it-generator/aritprog_v1.py
diff --git a/iterables/aritprog_v2.py b/14-it-generator/aritprog_v2.py
similarity index 100%
rename from iterables/aritprog_v2.py
rename to 14-it-generator/aritprog_v2.py
diff --git a/iterables/aritprog_v3.py b/14-it-generator/aritprog_v3.py
similarity index 100%
rename from iterables/aritprog_v3.py
rename to 14-it-generator/aritprog_v3.py
diff --git a/iterables/fibo_by_hand.py b/14-it-generator/fibo_by_hand.py
similarity index 100%
rename from iterables/fibo_by_hand.py
rename to 14-it-generator/fibo_by_hand.py
diff --git a/14-it-generator/isis2json/README.rst b/14-it-generator/isis2json/README.rst
new file mode 100644
index 0000000..072d26a
--- /dev/null
+++ b/14-it-generator/isis2json/README.rst
@@ -0,0 +1,12 @@
+isis2json.py
+============
+
+This directory contains a copy of the ``isis2json.py`` script, with
+minimal dependencies, just to allow the O'Reilly Atlas toolchain to
+render the listing of the script in appendix A of the book.
+
+If you want to use or contribute to this script, please get the full
+source code with all dependencies from the main ``isis2json``
+repository:
+
+https://github.com/fluentpython/isis2json
diff --git a/support/isis2json/isis2json.py b/14-it-generator/isis2json/isis2json.py
similarity index 100%
rename from support/isis2json/isis2json.py
rename to 14-it-generator/isis2json/isis2json.py
diff --git a/support/isis2json/iso2709.py b/14-it-generator/isis2json/iso2709.py
similarity index 100%
rename from support/isis2json/iso2709.py
rename to 14-it-generator/isis2json/iso2709.py
diff --git a/support/isis2json/subfield.py b/14-it-generator/isis2json/subfield.py
similarity index 100%
rename from support/isis2json/subfield.py
rename to 14-it-generator/isis2json/subfield.py
diff --git a/sequences/sentence.py b/14-it-generator/sentence.py
similarity index 85%
rename from sequences/sentence.py
rename to 14-it-generator/sentence.py
index fb866c4..447a192 100644
--- a/sequences/sentence.py
+++ b/14-it-generator/sentence.py
@@ -5,7 +5,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
@@ -17,7 +17,7 @@ def __init__(self, text):
def __getitem__(self, index):
return self.words[index] # <2>
- def __len__(self, index): # <3>
+ def __len__(self): # <3>
return len(self.words)
def __repr__(self):
diff --git a/iterables/sentence.rst b/14-it-generator/sentence.rst
similarity index 100%
rename from iterables/sentence.rst
rename to 14-it-generator/sentence.rst
diff --git a/iterables/sentence_gen.py b/14-it-generator/sentence_gen.py
similarity index 93%
rename from iterables/sentence_gen.py
rename to 14-it-generator/sentence_gen.py
index a17c48f..32a8225 100644
--- a/iterables/sentence_gen.py
+++ b/14-it-generator/sentence_gen.py
@@ -5,7 +5,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
diff --git a/iterables/sentence_gen2.py b/14-it-generator/sentence_gen2.py
similarity index 92%
rename from iterables/sentence_gen2.py
rename to 14-it-generator/sentence_gen2.py
index 8b0f355..b308100 100644
--- a/iterables/sentence_gen2.py
+++ b/14-it-generator/sentence_gen2.py
@@ -5,7 +5,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
diff --git a/iterables/sentence_genexp.py b/14-it-generator/sentence_genexp.py
similarity index 97%
rename from iterables/sentence_genexp.py
rename to 14-it-generator/sentence_genexp.py
index 2919c29..52228de 100644
--- a/iterables/sentence_genexp.py
+++ b/14-it-generator/sentence_genexp.py
@@ -6,7 +6,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
diff --git a/iterables/sentence_iter.py b/14-it-generator/sentence_iter.py
similarity index 98%
rename from iterables/sentence_iter.py
rename to 14-it-generator/sentence_iter.py
index 938d5b4..11b8179 100644
--- a/iterables/sentence_iter.py
+++ b/14-it-generator/sentence_iter.py
@@ -9,7 +9,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
diff --git a/iterables/sentence_iter2.py b/14-it-generator/sentence_iter2.py
similarity index 96%
rename from iterables/sentence_iter2.py
rename to 14-it-generator/sentence_iter2.py
index 8597b32..2663f3f 100644
--- a/iterables/sentence_iter2.py
+++ b/14-it-generator/sentence_iter2.py
@@ -8,7 +8,7 @@
import re
import reprlib
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
class Sentence:
diff --git a/iterables/sentence_runner.py b/14-it-generator/sentence_runner.py
similarity index 100%
rename from iterables/sentence_runner.py
rename to 14-it-generator/sentence_runner.py
diff --git a/iterables/yield_delegate_fail.py b/14-it-generator/yield_delegate_fail.py
similarity index 100%
rename from iterables/yield_delegate_fail.py
rename to 14-it-generator/yield_delegate_fail.py
diff --git a/iterables/yield_delegate_fix.py b/14-it-generator/yield_delegate_fix.py
similarity index 100%
rename from iterables/yield_delegate_fix.py
rename to 14-it-generator/yield_delegate_fix.py
diff --git a/15-context-mngr/README.rst b/15-context-mngr/README.rst
new file mode 100644
index 0000000..7fbc0b4
--- /dev/null
+++ b/15-context-mngr/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 15 - "Context managers and else blocks"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/control/mirror.py b/15-context-mngr/mirror.py
similarity index 97%
rename from control/mirror.py
rename to 15-context-mngr/mirror.py
index 27f6d75..67782ff 100644
--- a/control/mirror.py
+++ b/15-context-mngr/mirror.py
@@ -15,6 +15,8 @@
YKCOWREBBAJ
>>> what # <4>
'JABBERWOCKY'
+ >>> print('Back to normal.') # <5>
+ Back to normal.
# END MIRROR_DEMO_1
diff --git a/control/mirror_gen.py b/15-context-mngr/mirror_gen.py
similarity index 100%
rename from control/mirror_gen.py
rename to 15-context-mngr/mirror_gen.py
diff --git a/control/mirror_gen_exc.py b/15-context-mngr/mirror_gen_exc.py
similarity index 74%
rename from control/mirror_gen_exc.py
rename to 15-context-mngr/mirror_gen_exc.py
index cfe3b34..9dcc560 100644
--- a/control/mirror_gen_exc.py
+++ b/15-context-mngr/mirror_gen_exc.py
@@ -41,6 +41,10 @@
# END MIRROR_GEN_DEMO_2
The context manager can handle and "swallow" exceptions.
+The following test does not pass under doctest (a
+ZeroDivisionError is reported by doctest) but passes
+if executed by hand in the Python 3 console (the exception
+is handled by the context manager):
# BEGIN MIRROR_GEN_DEMO_3
@@ -52,6 +56,9 @@
...
ytpmuD ytpmuH
Please DO NOT divide by zero!
+
+# END MIRROR_GEN_DEMO_3
+
>>> with looking_glass():
... print('Humpty Dumpty')
... x = no_such_name # <1>
@@ -61,36 +68,34 @@
...
NameError: name 'no_such_name' is not defined
-# END MIRROR_GEN_DEMO_3
+
"""
-# BEGIN MIRROR_GEN_EX
+# BEGIN MIRROR_GEN_EXC
import contextlib
-@contextlib.contextmanager # <1>
+@contextlib.contextmanager
def looking_glass():
import sys
- original_write = sys.stdout.write # <2>
+ original_write = sys.stdout.write
- def reverse_write(text): # <3>
+ def reverse_write(text):
original_write(text[::-1])
- sys.stdout.write = reverse_write # <4>
- msg = ''
+ sys.stdout.write = reverse_write
+ msg = '' # <1>
try:
- yield 'JABBERWOCKY' # <5>
- except ZeroDivisionError: # <6>
- msg = 'Please DO NOT divide by zero!' # <7>
- except:
- raise # <8>
+ yield 'JABBERWOCKY'
+ except ZeroDivisionError: # <2>
+ msg = 'Please DO NOT divide by zero!'
finally:
- sys.stdout.write = original_write # <9>
+ sys.stdout.write = original_write # <3>
if msg:
- print(msg) # <10>
+ print(msg) # <4>
-# END MIRROR_GEN_EX
+# END MIRROR_GEN_EXC
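+
+
+# Editor's note: the class below is an illustrative sketch, not part of the
+# book's listing.  It shows a roughly equivalent class-based context manager,
+# assuming the same reversed-output behavior as looking_glass() above.
+class LookingGlassSketch:
+
+    def __enter__(self):
+        import sys
+        self.original_write = sys.stdout.write
+        sys.stdout.write = self.reverse_write
+        return 'JABBERWOCKY'
+
+    def reverse_write(self, text):
+        self.original_write(text[::-1])
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        import sys
+        sys.stdout.write = self.original_write  # restore stdout first
+        if exc_type is ZeroDivisionError:
+            print('Please DO NOT divide by zero!')
+            return True  # swallow only ZeroDivisionError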
diff --git a/16-coroutine/README.rst b/16-coroutine/README.rst
new file mode 100644
index 0000000..2771530
--- /dev/null
+++ b/16-coroutine/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 16 - "Coroutines"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/16-coroutine/coro_exc_demo.py b/16-coroutine/coro_exc_demo.py
new file mode 100644
index 0000000..5f155bc
--- /dev/null
+++ b/16-coroutine/coro_exc_demo.py
@@ -0,0 +1,66 @@
+"""
+Coroutine closing demonstration::
+
+# BEGIN DEMO_CORO_EXC_1
+ >>> exc_coro = demo_exc_handling()
+ >>> next(exc_coro)
+ -> coroutine started
+ >>> exc_coro.send(11)
+ -> coroutine received: 11
+ >>> exc_coro.send(22)
+ -> coroutine received: 22
+ >>> exc_coro.close()
+ >>> from inspect import getgeneratorstate
+ >>> getgeneratorstate(exc_coro)
+ 'GEN_CLOSED'
+
+# END DEMO_CORO_EXC_1
+
+Coroutine handling exception::
+
+# BEGIN DEMO_CORO_EXC_2
+ >>> exc_coro = demo_exc_handling()
+ >>> next(exc_coro)
+ -> coroutine started
+ >>> exc_coro.send(11)
+ -> coroutine received: 11
+ >>> exc_coro.throw(DemoException)
+ *** DemoException handled. Continuing...
+ >>> getgeneratorstate(exc_coro)
+ 'GEN_SUSPENDED'
+
+# END DEMO_CORO_EXC_2
+
+Coroutine not handling exception::
+
+# BEGIN DEMO_CORO_EXC_3
+ >>> exc_coro = demo_exc_handling()
+ >>> next(exc_coro)
+ -> coroutine started
+ >>> exc_coro.send(11)
+ -> coroutine received: 11
+ >>> exc_coro.throw(ZeroDivisionError)
+ Traceback (most recent call last):
+ ...
+ ZeroDivisionError
+ >>> getgeneratorstate(exc_coro)
+ 'GEN_CLOSED'
+
+# END DEMO_CORO_EXC_3
+"""
+
+# BEGIN EX_CORO_EXC
+class DemoException(Exception):
+ """An exception type for the demonstration."""
+
+def demo_exc_handling():
+ print('-> coroutine started')
+ while True:
+ try:
+ x = yield
+ except DemoException: # <1>
+ print('*** DemoException handled. Continuing...')
+ else: # <2>
+ print('-> coroutine received: {!r}'.format(x))
+ raise RuntimeError('This line should never run.') # <3>
+# END EX_CORO_EXC
diff --git a/16-coroutine/coro_finally_demo.py b/16-coroutine/coro_finally_demo.py
new file mode 100644
index 0000000..a116de1
--- /dev/null
+++ b/16-coroutine/coro_finally_demo.py
@@ -0,0 +1,61 @@
+"""
+Second coroutine closing demonstration::
+
+ >>> fin_coro = demo_finally()
+ >>> next(fin_coro)
+ -> coroutine started
+ >>> fin_coro.send(11)
+ -> coroutine received: 11
+ >>> fin_coro.send(22)
+ -> coroutine received: 22
+ >>> fin_coro.close()
+ -> coroutine ending
+
+
+Second coroutine not handling exception::
+
+ >>> fin_coro = demo_finally()
+ >>> next(fin_coro)
+ -> coroutine started
+ >>> fin_coro.send(11)
+ -> coroutine received: 11
+ >>> fin_coro.throw(ZeroDivisionError) # doctest: +SKIP
+ -> coroutine ending
+ Traceback (most recent call last):
+ File "", line 1, in
+ File "coro_exception_demos.py", line 109, in demo_finally
+ print('-> coroutine received: {!r}'.format(x))
+ ZeroDivisionError
+
+
+The last test above must be skipped because the output '-> coroutine ending'
+is not captured by doctest, which then reports a spurious failure. However,
+if you run this file as shown below, you'll see that output "leak" into
+standard output::
+
+
+ $ python3 -m doctest coro_finally_demo.py
+ -> coroutine ending
+
+"""
+
+
+# BEGIN EX_CORO_FINALLY
+class DemoException(Exception):
+ """An exception type for the demonstration."""
+
+
+def demo_finally():
+ print('-> coroutine started')
+ try:
+ while True:
+ try:
+ x = yield
+ except DemoException:
+ print('*** DemoException handled. Continuing...')
+ else:
+ print('-> coroutine received: {!r}'.format(x))
+ finally:
+ print('-> coroutine ending')
+
+# END EX_CORO_FINALLY
diff --git a/16-coroutine/coroaverager0.py b/16-coroutine/coroaverager0.py
new file mode 100644
index 0000000..440a521
--- /dev/null
+++ b/16-coroutine/coroaverager0.py
@@ -0,0 +1,28 @@
+"""
+A coroutine to compute a running average
+
+# BEGIN CORO_AVERAGER_TEST
+ >>> coro_avg = averager() # <1>
+ >>> next(coro_avg) # <2>
+ >>> coro_avg.send(10) # <3>
+ 10.0
+ >>> coro_avg.send(30)
+ 20.0
+ >>> coro_avg.send(5)
+ 15.0
+
+# END CORO_AVERAGER_TEST
+
+"""
+
+# BEGIN CORO_AVERAGER
+def averager():
+ total = 0.0
+ count = 0
+ average = None
+ while True: # <1>
+ term = yield average # <2>
+ total += term
+ count += 1
+ average = total/count
+# END CORO_AVERAGER
diff --git a/16-coroutine/coroaverager1.py b/16-coroutine/coroaverager1.py
new file mode 100644
index 0000000..5443624
--- /dev/null
+++ b/16-coroutine/coroaverager1.py
@@ -0,0 +1,30 @@
+# BEGIN DECORATED_AVERAGER
+"""
+A coroutine to compute a running average
+
+ >>> coro_avg = averager() # <1>
+ >>> from inspect import getgeneratorstate
+ >>> getgeneratorstate(coro_avg) # <2>
+ 'GEN_SUSPENDED'
+ >>> coro_avg.send(10) # <3>
+ 10.0
+ >>> coro_avg.send(30)
+ 20.0
+ >>> coro_avg.send(5)
+ 15.0
+
+"""
+
+from coroutil import coroutine # <4>
+
+@coroutine # <5>
+def averager(): # <6>
+ total = 0.0
+ count = 0
+ average = None
+ while True:
+ term = yield average
+ total += term
+ count += 1
+ average = total/count
+# END DECORATED_AVERAGER
diff --git a/16-coroutine/coroaverager2.py b/16-coroutine/coroaverager2.py
new file mode 100644
index 0000000..8f52300
--- /dev/null
+++ b/16-coroutine/coroaverager2.py
@@ -0,0 +1,61 @@
+"""
+A coroutine to compute a running average.
+
+Testing ``averager`` by itself::
+
+# BEGIN RETURNING_AVERAGER_DEMO1
+
+ >>> coro_avg = averager()
+ >>> next(coro_avg)
+ >>> coro_avg.send(10) # <1>
+ >>> coro_avg.send(30)
+ >>> coro_avg.send(6.5)
+ >>> coro_avg.send(None) # <2>
+ Traceback (most recent call last):
+ ...
+ StopIteration: Result(count=3, average=15.5)
+
+# END RETURNING_AVERAGER_DEMO1
+
+Catching `StopIteration` to extract the value returned by
+the coroutine::
+
+# BEGIN RETURNING_AVERAGER_DEMO2
+
+ >>> coro_avg = averager()
+ >>> next(coro_avg)
+ >>> coro_avg.send(10)
+ >>> coro_avg.send(30)
+ >>> coro_avg.send(6.5)
+ >>> try:
+ ... coro_avg.send(None)
+ ... except StopIteration as exc:
+ ... result = exc.value
+ ...
+ >>> result
+ Result(count=3, average=15.5)
+
+# END RETURNING_AVERAGER_DEMO2
+
+
+"""
+
+# BEGIN RETURNING_AVERAGER
+from collections import namedtuple
+
+Result = namedtuple('Result', 'count average')
+
+
+def averager():
+ total = 0.0
+ count = 0
+ average = None
+ while True:
+ term = yield
+ if term is None:
+ break # <1>
+ total += term
+ count += 1
+ average = total/count
+ return Result(count, average) # <2>
+# END RETURNING_AVERAGER
diff --git a/16-coroutine/coroaverager3.py b/16-coroutine/coroaverager3.py
new file mode 100644
index 0000000..c89a3fd
--- /dev/null
+++ b/16-coroutine/coroaverager3.py
@@ -0,0 +1,107 @@
+"""
+A coroutine to compute a running average.
+
+Testing ``averager`` by itself::
+
+ >>> coro_avg = averager()
+ >>> next(coro_avg)
+ >>> coro_avg.send(10)
+ >>> coro_avg.send(30)
+ >>> coro_avg.send(6.5)
+ >>> coro_avg.send(None)
+ Traceback (most recent call last):
+ ...
+ StopIteration: Result(count=3, average=15.5)
+
+
+Driving it with ``yield from``::
+
+ >>> def summarize(results):
+ ... while True:
+ ... result = yield from averager()
+ ... results.append(result)
+ ...
+ >>> results = []
+ >>> summary = summarize(results)
+ >>> next(summary)
+ >>> for height in data['girls;m']:
+ ... summary.send(height)
+ ...
+ >>> summary.send(None)
+ >>> for height in data['boys;m']:
+ ... summary.send(height)
+ ...
+ >>> summary.send(None)
+ >>> results == [
+ ... Result(count=10, average=1.4279999999999997),
+ ... Result(count=9, average=1.3888888888888888)
+ ... ]
+ True
+
+"""
+
+# BEGIN YIELD_FROM_AVERAGER
+from collections import namedtuple
+
+Result = namedtuple('Result', 'count average')
+
+
+# the subgenerator
+def averager(): # <1>
+ total = 0.0
+ count = 0
+ average = None
+ while True:
+ term = yield # <2>
+ if term is None: # <3>
+ break
+ total += term
+ count += 1
+ average = total/count
+ return Result(count, average) # <4>
+
+
+# the delegating generator
+def grouper(results, key): # <5>
+ while True: # <6>
+ results[key] = yield from averager() # <7>
+
+
+# the client code, a.k.a. the caller
+def main(data): # <8>
+ results = {}
+ for key, values in data.items():
+ group = grouper(results, key) # <9>
+ next(group) # <10>
+ for value in values:
+ group.send(value) # <11>
+ group.send(None) # important! <12>
+
+ # print(results) # uncomment to debug
+ report(results)
+
+
+# output report
+def report(results):
+ for key, result in sorted(results.items()):
+ group, unit = key.split(';')
+ print('{:2} {:5} averaging {:.2f}{}'.format(
+ result.count, group, result.average, unit))
+
+
+data = {
+ 'girls;kg':
+ [40.9, 38.5, 44.3, 42.2, 45.2, 41.7, 44.5, 38.0, 40.6, 44.5],
+ 'girls;m':
+ [1.6, 1.51, 1.4, 1.3, 1.41, 1.39, 1.33, 1.46, 1.45, 1.43],
+ 'boys;kg':
+ [39.0, 40.8, 43.2, 40.8, 43.1, 38.6, 41.4, 40.6, 36.3],
+ 'boys;m':
+ [1.38, 1.5, 1.32, 1.25, 1.37, 1.48, 1.25, 1.49, 1.46],
+}
+
+
+if __name__ == '__main__':
+ main(data)
+
+# END YIELD_FROM_AVERAGER
diff --git a/16-coroutine/coroutil.py b/16-coroutine/coroutil.py
new file mode 100644
index 0000000..a7ee245
--- /dev/null
+++ b/16-coroutine/coroutil.py
@@ -0,0 +1,12 @@
+# BEGIN CORO_DECO
+from functools import wraps
+
+def coroutine(func):
+ """Decorator: primes `func` by advancing to first `yield`"""
+ @wraps(func)
+ def primer(*args,**kwargs): # <1>
+ gen = func(*args,**kwargs) # <2>
+ next(gen) # <3>
+ return gen # <4>
+ return primer
+# END CORO_DECO
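+
+
+# Editor's note: the demo below is an illustrative sketch, not part of the
+# book's listing.  It shows why priming matters: sending a value to a
+# generator that has not yet reached its first yield raises TypeError.
+if __name__ == '__main__':
+
+    def echo():
+        while True:
+            received = yield
+            print('got:', received)
+
+    plain = echo()
+    try:
+        plain.send(1)  # not primed yet
+    except TypeError as exc:
+        print('unprimed generator:', exc)
+
+    primed = coroutine(echo)()  # the decorator calls next() for us
+    primed.send(1)  # prints: got: 1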
diff --git a/16-coroutine/taxi_sim.py b/16-coroutine/taxi_sim.py
new file mode 100644
index 0000000..e9c4cc1
--- /dev/null
+++ b/16-coroutine/taxi_sim.py
@@ -0,0 +1,202 @@
+
+"""
+Taxi simulator
+==============
+
+Driving a taxi from the console::
+
+ >>> from taxi_sim import taxi_process
+ >>> taxi = taxi_process(ident=13, trips=2, start_time=0)
+ >>> next(taxi)
+ Event(time=0, proc=13, action='leave garage')
+ >>> taxi.send(_.time + 7)
+ Event(time=7, proc=13, action='pick up passenger')
+ >>> taxi.send(_.time + 23)
+ Event(time=30, proc=13, action='drop off passenger')
+ >>> taxi.send(_.time + 5)
+ Event(time=35, proc=13, action='pick up passenger')
+ >>> taxi.send(_.time + 48)
+ Event(time=83, proc=13, action='drop off passenger')
+ >>> taxi.send(_.time + 1)
+ Event(time=84, proc=13, action='going home')
+ >>> taxi.send(_.time + 10)
+ Traceback (most recent call last):
+ File "", line 1, in
+ StopIteration
+
+Sample run with two cars, random seed 10. This is a valid doctest::
+
+ >>> main(num_taxis=2, seed=10)
+ taxi: 0 Event(time=0, proc=0, action='leave garage')
+ taxi: 0 Event(time=5, proc=0, action='pick up passenger')
+ taxi: 1 Event(time=5, proc=1, action='leave garage')
+ taxi: 1 Event(time=10, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=15, proc=1, action='drop off passenger')
+ taxi: 0 Event(time=17, proc=0, action='drop off passenger')
+ taxi: 1 Event(time=24, proc=1, action='pick up passenger')
+ taxi: 0 Event(time=26, proc=0, action='pick up passenger')
+ taxi: 0 Event(time=30, proc=0, action='drop off passenger')
+ taxi: 0 Event(time=34, proc=0, action='going home')
+ taxi: 1 Event(time=46, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=48, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=110, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=139, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=140, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=150, proc=1, action='going home')
+ *** end of events ***
+
+See longer sample run at the end of this module.
+
+"""
+
+import random
+import collections
+import queue
+import argparse
+
+DEFAULT_NUMBER_OF_TAXIS = 3
+DEFAULT_END_TIME = 180
+SEARCH_DURATION = 5
+TRIP_DURATION = 20
+DEPARTURE_INTERVAL = 5
+
+Event = collections.namedtuple('Event', 'time proc action')
+
+
+# BEGIN TAXI_PROCESS
+def taxi_process(ident, trips, start_time=0): # <1>
+ """Yield to simulator issuing event at each state change"""
+ time = yield Event(start_time, ident, 'leave garage') # <2>
+ for i in range(trips): # <3>
+ time = yield Event(time, ident, 'pick up passenger') # <4>
+ time = yield Event(time, ident, 'drop off passenger') # <5>
+
+ yield Event(time, ident, 'going home') # <6>
+ # end of taxi process # <7>
+# END TAXI_PROCESS
+
+
+# BEGIN TAXI_SIMULATOR
+class Simulator:
+
+ def __init__(self, procs_map):
+ self.events = queue.PriorityQueue()
+ self.procs = dict(procs_map)
+
+ def run(self, end_time): # <1>
+ """Schedule and display events until time is up"""
+ # schedule the first event for each cab
+ for _, proc in sorted(self.procs.items()): # <2>
+ first_event = next(proc) # <3>
+ self.events.put(first_event) # <4>
+
+ # main loop of the simulation
+ sim_time = 0 # <5>
+ while sim_time < end_time: # <6>
+ if self.events.empty(): # <7>
+ print('*** end of events ***')
+ break
+
+ current_event = self.events.get() # <8>
+ sim_time, proc_id, previous_action = current_event # <9>
+ print('taxi:', proc_id, proc_id * ' ', current_event) # <10>
+ active_proc = self.procs[proc_id] # <11>
+ next_time = sim_time + compute_duration(previous_action) # <12>
+ try:
+ next_event = active_proc.send(next_time) # <13>
+ except StopIteration:
+ del self.procs[proc_id] # <14>
+ else:
+ self.events.put(next_event) # <15>
+ else: # <16>
+ msg = '*** end of simulation time: {} events pending ***'
+ print(msg.format(self.events.qsize()))
+# END TAXI_SIMULATOR
+
+
+def compute_duration(previous_action):
+ """Compute action duration using exponential distribution"""
+ if previous_action in ['leave garage', 'drop off passenger']:
+ # new state is prowling
+ interval = SEARCH_DURATION
+ elif previous_action == 'pick up passenger':
+ # new state is trip
+ interval = TRIP_DURATION
+ elif previous_action == 'going home':
+ interval = 1
+ else:
+ raise ValueError('Unknown previous_action: %s' % previous_action)
+ return int(random.expovariate(1/interval)) + 1
+
+
+def main(end_time=DEFAULT_END_TIME, num_taxis=DEFAULT_NUMBER_OF_TAXIS,
+ seed=None):
+ """Initialize random generator, build procs and run simulation"""
+ if seed is not None:
+ random.seed(seed) # get reproducible results
+
+ taxis = {i: taxi_process(i, (i+1)*2, i*DEPARTURE_INTERVAL)
+ for i in range(num_taxis)}
+ sim = Simulator(taxis)
+ sim.run(end_time)
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser(
+ description='Taxi fleet simulator.')
+ parser.add_argument('-e', '--end-time', type=int,
+ default=DEFAULT_END_TIME,
+ help='simulation end time; default = %s'
+ % DEFAULT_END_TIME)
+ parser.add_argument('-t', '--taxis', type=int,
+ default=DEFAULT_NUMBER_OF_TAXIS,
+ help='number of taxis running; default = %s'
+ % DEFAULT_NUMBER_OF_TAXIS)
+ parser.add_argument('-s', '--seed', type=int, default=None,
+ help='random generator seed (for testing)')
+
+ args = parser.parse_args()
+ main(args.end_time, args.taxis, args.seed)
+
+
+"""
+
+Sample run from the command line, seed=3, maximum elapsed time=120::
+
+# BEGIN TAXI_SAMPLE_RUN
+$ python3 taxi_sim.py -s 3 -e 120
+taxi: 0 Event(time=0, proc=0, action='leave garage')
+taxi: 0 Event(time=2, proc=0, action='pick up passenger')
+taxi: 1 Event(time=5, proc=1, action='leave garage')
+taxi: 1 Event(time=8, proc=1, action='pick up passenger')
+taxi: 2 Event(time=10, proc=2, action='leave garage')
+taxi: 2 Event(time=15, proc=2, action='pick up passenger')
+taxi: 2 Event(time=17, proc=2, action='drop off passenger')
+taxi: 0 Event(time=18, proc=0, action='drop off passenger')
+taxi: 2 Event(time=18, proc=2, action='pick up passenger')
+taxi: 2 Event(time=25, proc=2, action='drop off passenger')
+taxi: 1 Event(time=27, proc=1, action='drop off passenger')
+taxi: 2 Event(time=27, proc=2, action='pick up passenger')
+taxi: 0 Event(time=28, proc=0, action='pick up passenger')
+taxi: 2 Event(time=40, proc=2, action='drop off passenger')
+taxi: 2 Event(time=44, proc=2, action='pick up passenger')
+taxi: 1 Event(time=55, proc=1, action='pick up passenger')
+taxi: 1 Event(time=59, proc=1, action='drop off passenger')
+taxi: 0 Event(time=65, proc=0, action='drop off passenger')
+taxi: 1 Event(time=65, proc=1, action='pick up passenger')
+taxi: 2 Event(time=65, proc=2, action='drop off passenger')
+taxi: 2 Event(time=72, proc=2, action='pick up passenger')
+taxi: 0 Event(time=76, proc=0, action='going home')
+taxi: 1 Event(time=80, proc=1, action='drop off passenger')
+taxi: 1 Event(time=88, proc=1, action='pick up passenger')
+taxi: 2 Event(time=95, proc=2, action='drop off passenger')
+taxi: 2 Event(time=97, proc=2, action='pick up passenger')
+taxi: 2 Event(time=98, proc=2, action='drop off passenger')
+taxi: 1 Event(time=106, proc=1, action='drop off passenger')
+taxi: 2 Event(time=109, proc=2, action='going home')
+taxi: 1 Event(time=110, proc=1, action='going home')
+*** end of events ***
+# END TAXI_SAMPLE_RUN
+
+"""
diff --git a/16-coroutine/taxi_sim0.py b/16-coroutine/taxi_sim0.py
new file mode 100644
index 0000000..4078fdd
--- /dev/null
+++ b/16-coroutine/taxi_sim0.py
@@ -0,0 +1,257 @@
+
+"""
+Taxi simulator
+
+Sample run with two cars, random seed 10. This is a valid doctest.
+
+ >>> main(num_taxis=2, seed=10)
+ taxi: 0 Event(time=0, proc=0, action='leave garage')
+ taxi: 0 Event(time=4, proc=0, action='pick up passenger')
+ taxi: 1 Event(time=5, proc=1, action='leave garage')
+ taxi: 1 Event(time=9, proc=1, action='pick up passenger')
+ taxi: 0 Event(time=10, proc=0, action='drop off passenger')
+ taxi: 1 Event(time=12, proc=1, action='drop off passenger')
+ taxi: 0 Event(time=17, proc=0, action='pick up passenger')
+ taxi: 1 Event(time=19, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=21, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=24, proc=1, action='pick up passenger')
+ taxi: 0 Event(time=28, proc=0, action='drop off passenger')
+ taxi: 1 Event(time=28, proc=1, action='drop off passenger')
+ taxi: 0 Event(time=29, proc=0, action='going home')
+ taxi: 1 Event(time=30, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=61, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=62, proc=1, action='going home')
+ *** end of events ***
+
+See explanation and longer sample run at the end of this module.
+
+"""
+
+import sys
+import random
+import collections
+import queue
+import argparse
+
+DEFAULT_NUMBER_OF_TAXIS = 3
+DEFAULT_END_TIME = 80
+SEARCH_DURATION = 4
+TRIP_DURATION = 10
+DEPARTURE_INTERVAL = 5
+
+Event = collections.namedtuple('Event', 'time proc action')
+
+
+def compute_delay(interval):
+ """Compute action delay using exponential distribution"""
+ return int(random.expovariate(1/interval)) + 1
+
+# BEGIN TAXI_PROCESS
+def taxi_process(ident, trips, start_time=0): # <1>
+ """Yield to simulator issuing event at each state change"""
+ time = yield Event(start_time, ident, 'leave garage') # <2>
+ for i in range(trips): # <3>
+ prowling_ends = time + compute_delay(SEARCH_DURATION) # <4>
+ time = yield Event(prowling_ends, ident, 'pick up passenger') # <5>
+
+ trip_ends = time + compute_delay(TRIP_DURATION) # <6>
+ time = yield Event(trip_ends, ident, 'drop off passenger') # <7>
+
+ yield Event(time + 1, ident, 'going home') # <8>
+ # end of taxi process # <9>
+# END TAXI_PROCESS
+
+# BEGIN TAXI_SIMULATOR
+class Simulator:
+
+ def __init__(self, procs_map):
+ self.events = queue.PriorityQueue()
+ self.procs = dict(procs_map)
+
+
+ def run(self, end_time): # <1>
+ """Schedule and display events until time is up"""
+ # schedule the first event for each cab
+ for _, proc in sorted(self.procs.items()): # <2>
+ first_event = next(proc) # <3>
+ self.events.put(first_event) # <4>
+
+ # main loop of the simulation
+ time = 0
+ while time < end_time: # <5>
+ if self.events.empty(): # <6>
+ print('*** end of events ***')
+ break
+
+ # get and display current event
+ current_event = self.events.get() # <7>
+ print('taxi:', current_event.proc, # <8>
+ current_event.proc * ' ', current_event)
+
+ # schedule next action for current proc
+ time = current_event.time # <9>
+ proc = self.procs[current_event.proc] # <10>
+ try:
+ next_event = proc.send(time) # <11>
+ except StopIteration:
+ del self.procs[current_event.proc] # <12>
+ else:
+ self.events.put(next_event) # <13>
+ else: # <14>
+ msg = '*** end of simulation time: {} events pending ***'
+ print(msg.format(self.events.qsize()))
+# END TAXI_SIMULATOR
+
+def main(end_time=DEFAULT_END_TIME, num_taxis=DEFAULT_NUMBER_OF_TAXIS,
+ seed=None):
+ """Initialize random generator, build procs and run simulation"""
+ if seed is not None:
+ random.seed(seed) # get reproducible results
+
+ taxis = {i: taxi_process(i, (i+1)*2, i*DEPARTURE_INTERVAL)
+ for i in range(num_taxis)}
+ sim = Simulator(taxis)
+ sim.run(end_time)
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser(
+ description='Taxi fleet simulator.')
+ parser.add_argument('-e', '--end-time', type=int,
+ default=DEFAULT_END_TIME,
+ help='simulation end time; default = %s'
+ % DEFAULT_END_TIME)
+ parser.add_argument('-t', '--taxis', type=int,
+ default=DEFAULT_NUMBER_OF_TAXIS,
+ help='number of taxis running; default = %s'
+ % DEFAULT_NUMBER_OF_TAXIS)
+ parser.add_argument('-s', '--seed', type=int, default=None,
+ help='random generator seed (for testing)')
+
+ args = parser.parse_args()
+ main(args.end_time, args.taxis, args.seed)
+
+
+"""
+Notes for the ``taxi_process`` coroutine::
+
+<1> `taxi_process` will be called once per taxi, creating a generator
+ object to represent its operations. `ident` is the number of the taxi
+ (e.g. 0, 1, 2 in the sample run); `trips` is the number of trips this
+ taxi will make before going home; `start_time` is when the taxi
+ leaves the garage.
+
+<2> The first `Event` yielded is `'leave garage'`. This suspends the
+ coroutine, and lets the simulation main loop proceed to the next
+ scheduled event. When it's time to reactivate this process, the main
+ loop will `send` the current simulation time, which is assigned to
+ `time`.
+
+<3> This block will be repeated once for each trip.
+
+<4> The ending time of the search for a passenger is computed.
+
+<5> An `Event` signaling passenger pick up is yielded. The coroutine
+ pauses here. When the time comes to reactivate this coroutine,
+ the main loop will again `send` the current time.
+
+<6> The ending time of the trip is computed, taking into account the
+ current `time`.
+
+<7> An `Event` signaling passenger drop off is yielded. The coroutine is
+ suspended again, waiting for the main loop to send the time when it
+ should continue.
+
+<8> The `for` loop ends after the given number of trips, and a final
+ `'going home'` event is yielded, to happen 1 minute after the current
+ time. The coroutine will suspend for the last time. When reactivated,
+ it will be sent the time from the simulation main loop, but here I
+ don't assign it to any variable because it will not be useful.
+
+<9> When the coroutine falls off the end, the coroutine object raises
+ `StopIteration`.
+
+
+Notes for the ``Simulator.run`` method::
+
+<1> The simulation `end_time` is the only required argument for `run`.
+
+<2> Use `sorted` to retrieve the `self.procs` items ordered by the
+ integer key; we don't care about the key, so assign it to `_`.
+
+<3> `next(proc)` primes each coroutine by advancing it to the first
+ yield, so it's ready to be sent data. An `Event` is yielded.
+
+<4> Add each event to the `self.events` `PriorityQueue`. The first
+ event for each taxi is `'leave garage'`, as seen in the sample run
+ at the top of this module.
+
+<5> Main loop of the simulation: run until the current `time` equals
+ or exceeds the `end_time`.
+
+<6> The main loop may also exit if there are no pending events in the
+ queue.
+
+<7> Get `Event` with the smallest `time` in the queue; this is the
+ `current_event`.
+
+<8> Display the `Event`, identifying the taxi and adding indentation
+ according to the taxi id.
+
+<9> Update the simulation time with the time of the `current_event`.
+
+<10> Retrieve the coroutine for this taxi from the `self.procs`
+ dictionary.
+
+<11> Send the `time` to the coroutine. The coroutine will yield the
+ `next_event` or raise `StopIteration` if it's finished.
+
+<12> If `StopIteration` was raised, delete the coroutine from the
+ `self.procs` dictionary.
+
+<13> Otherwise, put the `next_event` in the queue.
+
+<14> If the loop exits because the simulation end time was reached,
+ display the number of pending events (which may happen to be zero).
+
+
+Sample run from the command line, seed=24, total elapsed time=160::
+
+# BEGIN TAXI_SAMPLE_RUN
+$ python3 taxi_sim.py -s 24 -e 160
+taxi: 0 Event(time=0, proc=0, action='leave garage')
+taxi: 0 Event(time=5, proc=0, action='pick up passenger')
+taxi: 1 Event(time=5, proc=1, action='leave garage')
+taxi: 1 Event(time=6, proc=1, action='pick up passenger')
+taxi: 2 Event(time=10, proc=2, action='leave garage')
+taxi: 2 Event(time=11, proc=2, action='pick up passenger')
+taxi: 2 Event(time=23, proc=2, action='drop off passenger')
+taxi: 0 Event(time=24, proc=0, action='drop off passenger')
+taxi: 2 Event(time=24, proc=2, action='pick up passenger')
+taxi: 2 Event(time=26, proc=2, action='drop off passenger')
+taxi: 0 Event(time=30, proc=0, action='pick up passenger')
+taxi: 2 Event(time=31, proc=2, action='pick up passenger')
+taxi: 0 Event(time=43, proc=0, action='drop off passenger')
+taxi: 0 Event(time=44, proc=0, action='going home')
+taxi: 2 Event(time=46, proc=2, action='drop off passenger')
+taxi: 2 Event(time=49, proc=2, action='pick up passenger')
+taxi: 1 Event(time=70, proc=1, action='drop off passenger')
+taxi: 2 Event(time=70, proc=2, action='drop off passenger')
+taxi: 2 Event(time=71, proc=2, action='pick up passenger')
+taxi: 2 Event(time=79, proc=2, action='drop off passenger')
+taxi: 1 Event(time=88, proc=1, action='pick up passenger')
+taxi: 2 Event(time=92, proc=2, action='pick up passenger')
+taxi: 2 Event(time=98, proc=2, action='drop off passenger')
+taxi: 2 Event(time=99, proc=2, action='going home')
+taxi: 1 Event(time=102, proc=1, action='drop off passenger')
+taxi: 1 Event(time=104, proc=1, action='pick up passenger')
+taxi: 1 Event(time=135, proc=1, action='drop off passenger')
+taxi: 1 Event(time=136, proc=1, action='pick up passenger')
+taxi: 1 Event(time=151, proc=1, action='drop off passenger')
+taxi: 1 Event(time=152, proc=1, action='going home')
+*** end of events ***
+# END TAXI_SAMPLE_RUN
+
+"""
diff --git a/16-coroutine/taxi_sim_delay.py b/16-coroutine/taxi_sim_delay.py
new file mode 100644
index 0000000..8f38e4f
--- /dev/null
+++ b/16-coroutine/taxi_sim_delay.py
@@ -0,0 +1,215 @@
+
+"""
+Taxi simulator with delay on output
+===================================
+
+This is a variation of ``taxi_sim.py`` that adds a ``-d`` command-line
+option. When given, that option adds a delay in the main loop, pausing
+the simulation for .5s for each "minute" of simulation time.
+
+
+Driving a taxi from the console::
+
+ >>> from taxi_sim import taxi_process
+ >>> taxi = taxi_process(ident=13, trips=2, start_time=0)
+ >>> next(taxi)
+ Event(time=0, proc=13, action='leave garage')
+ >>> taxi.send(_.time + 7)
+ Event(time=7, proc=13, action='pick up passenger')
+ >>> taxi.send(_.time + 23)
+ Event(time=30, proc=13, action='drop off passenger')
+ >>> taxi.send(_.time + 5)
+ Event(time=35, proc=13, action='pick up passenger')
+ >>> taxi.send(_.time + 48)
+ Event(time=83, proc=13, action='drop off passenger')
+ >>> taxi.send(_.time + 1)
+ Event(time=84, proc=13, action='going home')
+ >>> taxi.send(_.time + 10)
+ Traceback (most recent call last):
+ File "", line 1, in
+ StopIteration
+
+Sample run with two cars, random seed 10. This is a valid doctest::
+
+ >>> main(num_taxis=2, seed=10)
+ taxi: 0 Event(time=0, proc=0, action='leave garage')
+ taxi: 0 Event(time=5, proc=0, action='pick up passenger')
+ taxi: 1 Event(time=5, proc=1, action='leave garage')
+ taxi: 1 Event(time=10, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=15, proc=1, action='drop off passenger')
+ taxi: 0 Event(time=17, proc=0, action='drop off passenger')
+ taxi: 1 Event(time=24, proc=1, action='pick up passenger')
+ taxi: 0 Event(time=26, proc=0, action='pick up passenger')
+ taxi: 0 Event(time=30, proc=0, action='drop off passenger')
+ taxi: 0 Event(time=34, proc=0, action='going home')
+ taxi: 1 Event(time=46, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=48, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=110, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=139, proc=1, action='pick up passenger')
+ taxi: 1 Event(time=140, proc=1, action='drop off passenger')
+ taxi: 1 Event(time=150, proc=1, action='going home')
+ *** end of events ***
+
+See longer sample run at the end of this module.
+
+"""
+
+import random
+import collections
+import queue
+import argparse
+import time
+
+DEFAULT_NUMBER_OF_TAXIS = 3
+DEFAULT_END_TIME = 180
+SEARCH_DURATION = 5
+TRIP_DURATION = 20
+DEPARTURE_INTERVAL = 5
+
+Event = collections.namedtuple('Event', 'time proc action')
+
+
+# BEGIN TAXI_PROCESS
+def taxi_process(ident, trips, start_time=0): # <1>
+ """Yield to simulator issuing event at each state change"""
+ time = yield Event(start_time, ident, 'leave garage') # <2>
+ for i in range(trips): # <3>
+ time = yield Event(time, ident, 'pick up passenger') # <4>
+ time = yield Event(time, ident, 'drop off passenger') # <5>
+
+ yield Event(time, ident, 'going home') # <6>
+ # end of taxi process # <7>
+# END TAXI_PROCESS
+
+
+# BEGIN TAXI_SIMULATOR
+class Simulator:
+
+ def __init__(self, procs_map):
+ self.events = queue.PriorityQueue()
+ self.procs = dict(procs_map)
+
+ def run(self, end_time, delay=False): # <1>
+ """Schedule and display events until time is up"""
+ # schedule the first event for each cab
+ for _, proc in sorted(self.procs.items()): # <2>
+ first_event = next(proc) # <3>
+ self.events.put(first_event) # <4>
+
+ # main loop of the simulation
+ sim_time = 0 # <5>
+ while sim_time < end_time: # <6>
+ if self.events.empty(): # <7>
+ print('*** end of events ***')
+ break
+
+ # get and display current event
+ current_event = self.events.get() # <8>
+ if delay:
+ time.sleep((current_event.time - sim_time) / 2)
+ # update the simulation time
+ sim_time, proc_id, previous_action = current_event
+ print('taxi:', proc_id, proc_id * ' ', current_event)
+ active_proc = self.procs[proc_id]
+ # schedule next action for current proc
+ next_time = sim_time + compute_duration(previous_action)
+ try:
+ next_event = active_proc.send(next_time) # <12>
+ except StopIteration:
+ del self.procs[proc_id] # <13>
+ else:
+ self.events.put(next_event) # <14>
+ else: # <15>
+ msg = '*** end of simulation time: {} events pending ***'
+ print(msg.format(self.events.qsize()))
+# END TAXI_SIMULATOR
+
+
+def compute_duration(previous_action):
+ """Compute action duration using exponential distribution"""
+ if previous_action in ['leave garage', 'drop off passenger']:
+ # new state is prowling
+ interval = SEARCH_DURATION
+ elif previous_action == 'pick up passenger':
+ # new state is trip
+ interval = TRIP_DURATION
+ elif previous_action == 'going home':
+ interval = 1
+ else:
+ raise ValueError('Unknown previous_action: %s' % previous_action)
+ return int(random.expovariate(1/interval)) + 1
+
+
+def main(end_time=DEFAULT_END_TIME, num_taxis=DEFAULT_NUMBER_OF_TAXIS,
+ seed=None, delay=False):
+ """Initialize random generator, build procs and run simulation"""
+ if seed is not None:
+ random.seed(seed) # get reproducible results
+
+ taxis = {i: taxi_process(i, (i+1)*2, i*DEPARTURE_INTERVAL)
+ for i in range(num_taxis)}
+ sim = Simulator(taxis)
+ sim.run(end_time, delay)
+
+
+if __name__ == '__main__':
+
+ parser = argparse.ArgumentParser(
+ description='Taxi fleet simulator.')
+ parser.add_argument('-e', '--end-time', type=int,
+ default=DEFAULT_END_TIME,
+ help='simulation end time; default = %s'
+ % DEFAULT_END_TIME)
+ parser.add_argument('-t', '--taxis', type=int,
+ default=DEFAULT_NUMBER_OF_TAXIS,
+ help='number of taxis running; default = %s'
+ % DEFAULT_NUMBER_OF_TAXIS)
+ parser.add_argument('-s', '--seed', type=int, default=None,
+ help='random generator seed (for testing)')
+ parser.add_argument('-d', '--delay', action='store_true',
+ help='introduce delay proportional to simulation time')
+
+ args = parser.parse_args()
+ main(args.end_time, args.taxis, args.seed, args.delay)
+
+
+"""
+
+Sample run from the command line, seed=3, maximum elapsed time=120::
+
+# BEGIN TAXI_SAMPLE_RUN
+$ python3 taxi_sim.py -s 3 -e 120
+taxi: 0 Event(time=0, proc=0, action='leave garage')
+taxi: 0 Event(time=2, proc=0, action='pick up passenger')
+taxi: 1 Event(time=5, proc=1, action='leave garage')
+taxi: 1 Event(time=8, proc=1, action='pick up passenger')
+taxi: 2 Event(time=10, proc=2, action='leave garage')
+taxi: 2 Event(time=15, proc=2, action='pick up passenger')
+taxi: 2 Event(time=17, proc=2, action='drop off passenger')
+taxi: 0 Event(time=18, proc=0, action='drop off passenger')
+taxi: 2 Event(time=18, proc=2, action='pick up passenger')
+taxi: 2 Event(time=25, proc=2, action='drop off passenger')
+taxi: 1 Event(time=27, proc=1, action='drop off passenger')
+taxi: 2 Event(time=27, proc=2, action='pick up passenger')
+taxi: 0 Event(time=28, proc=0, action='pick up passenger')
+taxi: 2 Event(time=40, proc=2, action='drop off passenger')
+taxi: 2 Event(time=44, proc=2, action='pick up passenger')
+taxi: 1 Event(time=55, proc=1, action='pick up passenger')
+taxi: 1 Event(time=59, proc=1, action='drop off passenger')
+taxi: 0 Event(time=65, proc=0, action='drop off passenger')
+taxi: 1 Event(time=65, proc=1, action='pick up passenger')
+taxi: 2 Event(time=65, proc=2, action='drop off passenger')
+taxi: 2 Event(time=72, proc=2, action='pick up passenger')
+taxi: 0 Event(time=76, proc=0, action='going home')
+taxi: 1 Event(time=80, proc=1, action='drop off passenger')
+taxi: 1 Event(time=88, proc=1, action='pick up passenger')
+taxi: 2 Event(time=95, proc=2, action='drop off passenger')
+taxi: 2 Event(time=97, proc=2, action='pick up passenger')
+taxi: 2 Event(time=98, proc=2, action='drop off passenger')
+taxi: 1 Event(time=106, proc=1, action='drop off passenger')
+taxi: 2 Event(time=109, proc=2, action='going home')
+taxi: 1 Event(time=110, proc=1, action='going home')
+*** end of events ***
+# END TAXI_SAMPLE_RUN
+
+"""
diff --git a/16-coroutine/yield_from_expansion.py b/16-coroutine/yield_from_expansion.py
new file mode 100644
index 0000000..2c3ba5e
--- /dev/null
+++ b/16-coroutine/yield_from_expansion.py
@@ -0,0 +1,52 @@
+# Code below is the expansion of the statement:
+#
+# RESULT = yield from EXPR
+#
+# Copied verbatim from the Formal Semantics section of
+# PEP 380 -- Syntax for Delegating to a Subgenerator
+#
+# https://www.python.org/dev/peps/pep-0380/#formal-semantics
+
+
+# BEGIN YIELD_FROM_EXPANSION
+_i = iter(EXPR) # <1>
+try:
+ _y = next(_i) # <2>
+except StopIteration as _e:
+ _r = _e.value # <3>
+else:
+ while 1: # <4>
+ try:
+ _s = yield _y # <5>
+ except GeneratorExit as _e: # <6>
+ try:
+ _m = _i.close
+ except AttributeError:
+ pass
+ else:
+ _m()
+ raise _e
+ except BaseException as _e: # <7>
+ _x = sys.exc_info()
+ try:
+ _m = _i.throw
+ except AttributeError:
+ raise _e
+ else: # <8>
+ try:
+ _y = _m(*_x)
+ except StopIteration as _e:
+ _r = _e.value
+ break
+ else: # <9>
+ try: # <10>
+ if _s is None: # <11>
+ _y = next(_i)
+ else:
+ _y = _i.send(_s)
+ except StopIteration as _e: # <12>
+ _r = _e.value
+ break
+
+RESULT = _r # <13>
+# END YIELD_FROM_EXPANSION
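+
+
+# Editor's note: the demo below is an illustrative sketch, not part of the
+# book's listing, and must be run on its own (the expansion above uses the
+# placeholders EXPR and RESULT, so this module is not executable as-is).
+# It exercises the .throw() path of the expansion (callouts <7> and <8>):
+# an exception thrown at the delegating generator is forwarded into the
+# subgenerator.
+def _sub():
+    try:
+        while True:
+            yield
+    except ValueError:
+        return 'handled in subgenerator'
+
+
+def _deleg():
+    result = yield from _sub()
+    print('subgenerator returned:', result)
+    yield 'done'
+
+
+gen = _deleg()
+next(gen)                      # prime; suspends inside _sub()
+print(gen.throw(ValueError))   # forwarded to _sub(); finally prints 'done'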
diff --git a/16-coroutine/yield_from_expansion_simplified.py b/16-coroutine/yield_from_expansion_simplified.py
new file mode 100644
index 0000000..2da7fe0
--- /dev/null
+++ b/16-coroutine/yield_from_expansion_simplified.py
@@ -0,0 +1,32 @@
+# Code below is a very simplified expansion of the statement:
+#
+# RESULT = yield from EXPR
+#
+# This code assumes that the subgenerator will run to completion,
+# without the client ever calling ``.throw()`` or ``.close()``.
+# Also, this code makes no distinction between the client
+# calling ``next(subgen)`` or ``subgen.send(...)``
+#
+# The full expansion is in:
+# PEP 380 -- Syntax for Delegating to a Subgenerator
+#
+# https://www.python.org/dev/peps/pep-0380/#formal-semantics
+
+
+# BEGIN YIELD_FROM_EXPANSION_SIMPLIFIED
+_i = iter(EXPR) # <1>
+try:
+ _y = next(_i) # <2>
+except StopIteration as _e:
+ _r = _e.value # <3>
+else:
+ while 1: # <4>
+ _s = yield _y # <5>
+ try:
+ _y = _i.send(_s) # <6>
+ except StopIteration as _e: # <7>
+ _r = _e.value
+ break
+
+RESULT = _r # <8>
+# END YIELD_FROM_EXPANSION_SIMPLIFIED
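+
+
+# Editor's note: the demo below is an illustrative sketch, not part of the
+# book's listing, and must be run on its own (the expansion above uses the
+# placeholders EXPR and RESULT, so this module is not executable as-is).
+# It shows that ``yield from`` forwards sent values to the subgenerator and
+# captures the subgenerator's return value.
+def _adder():
+    total = 0
+    while True:
+        term = yield
+        if term is None:
+            return total  # becomes the value of the yield from expression
+        total += term
+
+
+def _delegator(results):
+    results.append((yield from _adder()))
+
+
+results = []
+deleg = _delegator(results)
+next(deleg)        # prime: runs up to the subgenerator's first yield
+deleg.send(10)
+deleg.send(20)
+try:
+    deleg.send(None)   # ends _adder; its StopIteration also ends _delegator
+except StopIteration:
+    pass
+print(results)     # [30]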
diff --git a/17-futures-py3.7/README.rst b/17-futures-py3.7/README.rst
new file mode 100644
index 0000000..7167b33
--- /dev/null
+++ b/17-futures-py3.7/README.rst
@@ -0,0 +1,10 @@
+Updated sample code for Chapter 17 - "Concurrency with futures"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
+
+ This directory contains code updated to run with Python 3.7 and
+ **aiohttp** 3.5. When the first edition of "Fluent Python" was
+ written, the **asyncio** package was provisional, and the latest
+ version of **aiohttp** was 0.13.1. The APIs of both packages have had
+ significant breaking changes since then.
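+
+ For example, the top-level code that drives a coroutine changed roughly
+ as sketched below (a general illustration, not a specific listing from
+ the book)::
+
+     # pre-3.7 idiom
+     loop = asyncio.get_event_loop()
+     result = loop.run_until_complete(download_many(cc_list))
+     loop.close()
+
+     # 3.7+ idiom
+     result = asyncio.run(download_many(cc_list))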
diff --git a/17-futures-py3.7/countries/.gitignore b/17-futures-py3.7/countries/.gitignore
new file mode 100644
index 0000000..8ea4ee7
--- /dev/null
+++ b/17-futures-py3.7/countries/.gitignore
@@ -0,0 +1 @@
+flags/
diff --git a/17-futures-py3.7/countries/README.rst b/17-futures-py3.7/countries/README.rst
new file mode 100644
index 0000000..0f29b01
--- /dev/null
+++ b/17-futures-py3.7/countries/README.rst
@@ -0,0 +1,178 @@
+==============================
+Setting up Nginx and Toxiproxy
+==============================
+
+This text explains how to configure Nginx and Toxiproxy to test HTTP client code while avoiding network traffic and introducing simulated delays and errors. This setup is necessary if you want to experiment with the ``flags2*.py`` image download examples in this directory -- covered in chapters 17 and 18 of Fluent Python.
+
+
+Overview
+========
+
+The flag download examples are designed to compare the performance of different approaches to finding and downloading files from the Web. However, we don't want to hit a public server with multiple requests per second while testing, and we want to be able to simulate high latency and random network errors.
+
+For this setup I chose Nginx as the HTTP server because it is very fast and easy to configure, and Toxiproxy, a proxy created by Shopify to introduce delays and network errors when testing distributed systems.
+
+The archive ``flags.zip`` contains a directory ``flags/`` with 194 subdirectories, each containing a ``.gif`` image and a ``metadata.json`` file. These are public-domain images copied from the CIA World Fact Book.
+
+Once these files are unpacked to the ``flags/`` directory and Nginx is configured, you can experiment with the ``flags2*.py`` examples without hitting the network.
+
+
+Procedure
+=========
+
+1. Unpack test data
+-------------------
+
+The instructions in this section are for GNU/Linux or OSX using the command line. Windows users should have no difficulty doing the same operations with the Windows Explorer GUI.
+
+Unpack the initial data in the ``countries/`` directory::
+
+ $ unzip flags.zip
+ ... many lines omitted ...
+ creating: flags/zw/
+ inflating: flags/zw/metadata.json
+ inflating: flags/zw/zw.gif
+
+
+Verify that 194 directories are created in ``countries/flags/``, each with a ``.gif`` and a ``metadata.json`` file::
+
+
+ $ ls flags | wc -w
+ 194
+ $ find flags | grep .gif | wc -l
+ 194
+ $ find flags | grep .json | wc -l
+ 194
+ $ ls flags/ad
+ ad.gif metadata.json
+
+
+2. Install Nginx
+----------------
+
+Download and install Nginx. I used version 1.6.2 -- the latest stable version as I write this.
+
+* Download page: http://nginx.org/en/download.html
+
+* Beginner's guide: http://nginx.org/en/docs/beginners_guide.html
+
+
+3. Configure Nginx
+------------------
+
+Edit the ``nginx.conf`` file to set the port and document root. You can determine which ``nginx.conf`` is in use by running::
+
+
+ $ nginx -V
+
+
+The output starts with::
+
+ nginx version: nginx/1.6.2
+ built by clang 6.0 (clang-600.0.51) (based on LLVM 3.5svn)
+ TLS SNI support enabled
+ configure arguments:...
+
+
+Among the configure arguments you'll see ``--conf-path=``. That's the file you will edit.
+
+Most of the content in ``nginx.conf`` is within a block labeled ``http`` and enclosed in curly braces. Within that block there can be multiple blocks labeled ``server``. Add another ``server`` block like this one::
+
+
+ server {
+ listen 8001;
+
+ location /flags/ {
+ root /full-path-to.../countries/;
+ }
+ }
+
+
+After editing ``nginx.conf`` the server must be started (if it's not running) or told to reload the configuration file::
+
+
+ $ nginx # to start, if necessary
+ $ nginx -s reload # to reload the configuration
+
+
+To test the configuration, open the URL http://localhost:8001/flags/ad/ad.gif in a browser. You should see the blue, yellow and red flag of Andorra.
+
+If the test fails, please double check the procedure just described and refer to the Nginx documentation.
+
+At this point you may run the ``flags2*.py`` examples against the Nginx install by providing the ``--server LOCAL`` command-line option. For example::
+
+
+ $ python3 flags2_threadpool.py -s LOCAL
+ LOCAL site: http://localhost:8001/flags
+ Searching for 20 flags: from BD to VN
+ 20 concurrent connections will be used.
+ --------------------
+ 20 flags downloaded.
+ Elapsed time: 0.09s
+
+
+Note that Nginx is so fast that you will not see much difference in run time between the sequential and the concurrent versions. For more realistic testing with simulated network lag, we need to set up Toxiproxy.
+
+
+4. Install and run Toxiproxy
+----------------------------
+
+Install...
+
+In one terminal::
+
+ $ toxiproxy-server
+
+In another terminal::
+
+ $ toxiproxy-cli create nginx_flags_delay -l localhost:8002 -u localhost:8001
+ Created new proxy nginx_flags_delay
+ $ toxiproxy-cli toxic add nginx_flags_delay -t latency -a latency=500
+ Added downstream latency toxic 'latency_downstream' on proxy 'nginx_flags_delay'
+
+
+This creates an HTTP proxy on port 8002 which adds a 0.5s delay to every response. You can test it with a browser on port 8002: http://localhost:8002/flags/ad/ad.gif -- the flag of Andorra should appear after ½ second.
+
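+One quick way to confirm the added latency from Python (an editorial sketch;
+it assumes the LOCAL and DELAY servers described above are running) is to
+time the same request against both ports::
+
+    import time
+    import requests
+
+    for label, port in [('LOCAL', 8001), ('DELAY', 8002)]:
+        t0 = time.perf_counter()
+        requests.get('http://localhost:{}/flags/ad/ad.gif'.format(port))
+        print(label, '{:.2f}s'.format(time.perf_counter() - t0))
+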
+TODO: UPDATE NEXT PARAGRAPH
+
+There is also the ``XXX`` script, which runs a proxy on port 8003 producing errors in 25% of the responses and a 0.5s delay in 50% of the responses. You can also test it with the browser on port 8003, but remember that errors are expected.
+
+
+Platform-specific instructions
+==============================
+
+
+Nginx setup on Mac OS X
+------------------------
+
+Homebrew (copy & paste code at the bottom of http://brew.sh/)::
+
+
+ $ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ $ brew doctor
+ $ brew install nginx
+
+
+Homebrew reports the relevant locations::
+
+    Docroot is: /usr/local/var/www
+    Config file: /usr/local/etc/nginx/nginx.conf
+
+
+::
+
+ To have launchd start nginx at login:
+ ln -sfv /usr/local/opt/nginx/*.plist ~/Library/LaunchAgents
+ Then to load nginx now:
+ launchctl load ~/Library/LaunchAgents/homebrew.mxcl.nginx.plist
+ Or, if you don't want/need launchctl, you can just run:
+ nginx
+
+
+
+Nginx setup on Lubuntu 14.04.1 LTS
+----------------------------------
+
+Docroot is: /usr/share/nginx/html
+
+
diff --git a/17-futures-py3.7/countries/country_codes.txt b/17-futures-py3.7/countries/country_codes.txt
new file mode 100644
index 0000000..72c37f0
--- /dev/null
+++ b/17-futures-py3.7/countries/country_codes.txt
@@ -0,0 +1,8 @@
+AD AE AF AG AL AM AO AR AT AU AZ BA BB BD BE BF BG BH BI BJ BN BO BR BS BT
+BW BY BZ CA CD CF CG CH CI CL CM CN CO CR CU CV CY CZ DE DJ DK DM DZ EC EE
+EG ER ES ET FI FJ FM FR GA GB GD GE GH GM GN GQ GR GT GW GY HN HR HT HU ID
+IE IL IN IQ IR IS IT JM JO JP KE KG KH KI KM KN KP KR KW KZ LA LB LC LI LK
+LR LS LT LU LV LY MA MC MD ME MG MH MK ML MM MN MR MT MU MV MW MX MY MZ NA
+NE NG NI NL NO NP NR NZ OM PA PE PG PH PK PL PT PW PY QA RO RS RU RW SA SB
+SC SD SE SG SI SK SL SM SN SO SR SS ST SV SY SZ TD TG TH TJ TL TM TN TO TR
+TT TV TW TZ UA UG US UY UZ VA VC VE VN VU WS YE ZA ZM ZW
diff --git a/17-futures-py3.7/countries/flags.py b/17-futures-py3.7/countries/flags.py
new file mode 100644
index 0000000..7a7f854
--- /dev/null
+++ b/17-futures-py3.7/countries/flags.py
@@ -0,0 +1,63 @@
+"""Download flags of top 20 countries by population
+
+Sequential version
+
+Sample run::
+
+ $ python3 flags.py
+ BD BR CD CN DE EG ET FR ID IN IR JP MX NG PH PK RU TR US VN
+ 20 flags downloaded in 5.49s
+
+"""
+# BEGIN FLAGS_PY
+import os
+import time
+import sys
+
+import requests # <1>
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split() # <2>
+
+BASE_URL = 'http://flupy.org/data/flags' # <3>
+
+DEST_DIR = 'downloads/' # <4>
+
+
+def save_flag(img, filename): # <5>
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+def get_flag(cc): # <6>
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ resp = requests.get(url)
+ return resp.content
+
+
+def show(text): # <7>
+ print(text, end=' ')
+ sys.stdout.flush()
+
+
+def download_many(cc_list): # <8>
+ for cc in sorted(cc_list): # <9>
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+
+ return len(cc_list)
+
+
+def main(): # <10>
+ t0 = time.time()
+ count = download_many(POP20_CC)
+ elapsed = time.time() - t0
+ msg = '\n{} flags downloaded in {:.2f}s'
+ print(msg.format(count, elapsed))
+
+
+if __name__ == '__main__':
+ main()
+# END FLAGS_PY
diff --git a/17-futures-py3.7/countries/flags2_asyncio.py b/17-futures-py3.7/countries/flags2_asyncio.py
new file mode 100644
index 0000000..2635155
--- /dev/null
+++ b/17-futures-py3.7/countries/flags2_asyncio.py
@@ -0,0 +1,103 @@
+"""Download flags of countries (with error handling).
+
+asyncio async/await version
+
+"""
+# BEGIN FLAGS2_ASYNCIO_TOP
+import asyncio
+import collections
+
+import aiohttp
+from aiohttp import web
+import tqdm
+
+from flags2_common import main, HTTPStatus, Result, save_flag
+
+# default set low to avoid errors from remote site, such as
+# 503 - Service Temporarily Unavailable
+DEFAULT_CONCUR_REQ = 5
+MAX_CONCUR_REQ = 1000
+
+
+class FetchError(Exception): # <1>
+ def __init__(self, country_code):
+ self.country_code = country_code
+
+
+async def get_flag(session, base_url, cc): # <2>
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ async with session.get(url) as resp:
+ if resp.status == 200:
+ return await resp.read()
+ elif resp.status == 404:
+ raise web.HTTPNotFound()
+ else:
+ raise aiohttp.HttpProcessingError(
+ code=resp.status, message=resp.reason,
+ headers=resp.headers)
+
+
+async def download_one(session, cc, base_url, semaphore, verbose): # <3>
+ try:
+ async with semaphore: # <4>
+ image = await get_flag(session, base_url, cc) # <5>
+ except web.HTTPNotFound: # <6>
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ except Exception as exc:
+ raise FetchError(cc) from exc # <7>
+ else:
+ save_flag(image, cc.lower() + '.gif') # <8>
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose and msg:
+ print(cc, msg)
+
+ return Result(status, cc)
+# END FLAGS2_ASYNCIO_TOP
+
+# BEGIN FLAGS2_ASYNCIO_DOWNLOAD_MANY
+async def downloader_coro(cc_list, base_url, verbose, concur_req): # <1>
+ counter = collections.Counter()
+ semaphore = asyncio.Semaphore(concur_req) # <2>
+ async with aiohttp.ClientSession() as session: # <8>
+ to_do = [download_one(session, cc, base_url, semaphore, verbose)
+ for cc in sorted(cc_list)] # <3>
+
+ to_do_iter = asyncio.as_completed(to_do) # <4>
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list)) # <5>
+ for future in to_do_iter: # <6>
+ try:
+ res = await future # <7>
+ except FetchError as exc: # <8>
+ country_code = exc.country_code # <9>
+ try:
+ error_msg = exc.__cause__.args[0] # <10>
+ except IndexError:
+ error_msg = exc.__cause__.__class__.__name__ # <11>
+ if verbose and error_msg:
+ msg = '*** Error for {}: {}'
+ print(msg.format(country_code, error_msg))
+ status = HTTPStatus.error
+ else:
+ status = res.status
+
+ counter[status] += 1 # <12>
+
+ return counter # <13>
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ loop = asyncio.get_event_loop()
+ coro = downloader_coro(cc_list, base_url, verbose, concur_req)
+ counts = loop.run_until_complete(coro) # <14>
+ loop.close() # <15>
+
+ return counts
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
+# END FLAGS2_ASYNCIO_DOWNLOAD_MANY
diff --git a/17-futures-py3.7/countries/flags2_common.py b/17-futures-py3.7/countries/flags2_common.py
new file mode 100644
index 0000000..bfa40fb
--- /dev/null
+++ b/17-futures-py3.7/countries/flags2_common.py
@@ -0,0 +1,149 @@
+"""Utilities for second set of flag examples.
+"""
+
+import os
+import time
+import sys
+import string
+import argparse
+from collections import namedtuple
+from enum import Enum
+
+
+Result = namedtuple('Result', 'status data')
+
+HTTPStatus = Enum('Status', 'ok not_found error')
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split()
+
+DEFAULT_CONCUR_REQ = 1
+MAX_CONCUR_REQ = 1
+
+SERVERS = {
+ 'REMOTE': 'http://flupy.org/data/flags',
+ 'LOCAL': 'http://localhost:8001/flags',
+ 'DELAY': 'http://localhost:8002/flags',
+ 'ERROR': 'http://localhost:8003/flags',
+}
+DEFAULT_SERVER = 'LOCAL'
+
+DEST_DIR = 'downloads/'
+COUNTRY_CODES_FILE = 'country_codes.txt'
+
+
+def save_flag(img, filename):
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+def initial_report(cc_list, actual_req, server_label):
+ if len(cc_list) <= 10:
+ cc_msg = ', '.join(cc_list)
+ else:
+ cc_msg = 'from {} to {}'.format(cc_list[0], cc_list[-1])
+ print('{} site: {}'.format(server_label, SERVERS[server_label]))
+ msg = 'Searching for {} flag{}: {}'
+ plural = 's' if len(cc_list) != 1 else ''
+ print(msg.format(len(cc_list), plural, cc_msg))
+ plural = 's' if actual_req != 1 else ''
+ msg = '{} concurrent connection{} will be used.'
+ print(msg.format(actual_req, plural))
+
+
+def final_report(cc_list, counter, start_time):
+ elapsed = time.time() - start_time
+ print('-' * 20)
+ msg = '{} flag{} downloaded.'
+ plural = 's' if counter[HTTPStatus.ok] != 1 else ''
+ print(msg.format(counter[HTTPStatus.ok], plural))
+ if counter[HTTPStatus.not_found]:
+ print(counter[HTTPStatus.not_found], 'not found.')
+ if counter[HTTPStatus.error]:
+ plural = 's' if counter[HTTPStatus.error] != 1 else ''
+ print('{} error{}.'.format(counter[HTTPStatus.error], plural))
+ print('Elapsed time: {:.2f}s'.format(elapsed))
+
+
+def expand_cc_args(every_cc, all_cc, cc_args, limit):
+ codes = set()
+ A_Z = string.ascii_uppercase
+ if every_cc:
+ codes.update(a+b for a in A_Z for b in A_Z)
+ elif all_cc:
+ with open(COUNTRY_CODES_FILE) as fp:
+ text = fp.read()
+ codes.update(text.split())
+ else:
+ for cc in (c.upper() for c in cc_args):
+ if len(cc) == 1 and cc in A_Z:
+ codes.update(cc+c for c in A_Z)
+ elif len(cc) == 2 and all(c in A_Z for c in cc):
+ codes.add(cc)
+ else:
+ msg = 'each CC argument must be A to Z or AA to ZZ.'
+ raise ValueError('*** Usage error: '+msg)
+ return sorted(codes)[:limit]
+
+
+def process_args(default_concur_req):
+ server_options = ', '.join(sorted(SERVERS))
+ parser = argparse.ArgumentParser(
+ description='Download flags for country codes. '
+ 'Default: top 20 countries by population.')
+ parser.add_argument('cc', metavar='CC', nargs='*',
+ help='country code or 1st letter (eg. B for BA...BZ)')
+ parser.add_argument('-a', '--all', action='store_true',
+ help='get all available flags (AD to ZW)')
+ parser.add_argument('-e', '--every', action='store_true',
+ help='get flags for every possible code (AA...ZZ)')
+ parser.add_argument('-l', '--limit', metavar='N', type=int,
+ help='limit to N first codes', default=sys.maxsize)
+ parser.add_argument('-m', '--max_req', metavar='CONCURRENT', type=int,
+ default=default_concur_req,
+ help='maximum concurrent requests (default={})'
+ .format(default_concur_req))
+ parser.add_argument('-s', '--server', metavar='LABEL',
+ default=DEFAULT_SERVER,
+ help='Server to hit; one of {} (default={})'
+ .format(server_options, DEFAULT_SERVER))
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help='output detailed progress info')
+ args = parser.parse_args()
+ if args.max_req < 1:
+ print('*** Usage error: --max_req CONCURRENT must be >= 1')
+ parser.print_usage()
+ sys.exit(1)
+ if args.limit < 1:
+ print('*** Usage error: --limit N must be >= 1')
+ parser.print_usage()
+ sys.exit(1)
+ args.server = args.server.upper()
+ if args.server not in SERVERS:
+ print('*** Usage error: --server LABEL must be one of',
+ server_options)
+ parser.print_usage()
+ sys.exit(1)
+ try:
+ cc_list = expand_cc_args(args.every, args.all, args.cc, args.limit)
+ except ValueError as exc:
+ print(exc.args[0])
+ parser.print_usage()
+ sys.exit(1)
+
+ if not cc_list:
+ cc_list = sorted(POP20_CC)
+ return args, cc_list
+
+
+def main(download_many, default_concur_req, max_concur_req):
+ args, cc_list = process_args(default_concur_req)
+ actual_req = min(args.max_req, max_concur_req, len(cc_list))
+ initial_report(cc_list, actual_req, args.server)
+ base_url = SERVERS[args.server]
+ t0 = time.time()
+ counter = download_many(cc_list, base_url, args.verbose, actual_req)
+ assert sum(counter.values()) == len(cc_list), \
+ 'some downloads are unaccounted for'
+ final_report(cc_list, counter, t0)
diff --git a/17-futures-py3.7/countries/flags2_sequential.py b/17-futures-py3.7/countries/flags2_sequential.py
new file mode 100644
index 0000000..65a7e43
--- /dev/null
+++ b/17-futures-py3.7/countries/flags2_sequential.py
@@ -0,0 +1,87 @@
+"""Download flags of countries (with error handling).
+
+Sequential version
+
+Sample run::
+
+ $ python3 flags2_sequential.py -s DELAY b
+ DELAY site: http://localhost:8002/flags
+ Searching for 26 flags: from BA to BZ
+ 1 concurrent connection will be used.
+ --------------------
+ 17 flags downloaded.
+ 9 not found.
+ Elapsed time: 13.36s
+
+"""
+
+import collections
+
+import requests
+import tqdm
+
+from flags2_common import main, save_flag, HTTPStatus, Result
+
+
+DEFAULT_CONCUR_REQ = 1
+MAX_CONCUR_REQ = 1
+
+# BEGIN FLAGS2_BASIC_HTTP_FUNCTIONS
+def get_flag(base_url, cc):
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ resp = requests.get(url)
+ if resp.status_code != 200: # <1>
+ resp.raise_for_status()
+ return resp.content
+
+
+def download_one(cc, base_url, verbose=False):
+ try:
+ image = get_flag(base_url, cc)
+ except requests.exceptions.HTTPError as exc: # <2>
+ res = exc.response
+ if res.status_code == 404:
+ status = HTTPStatus.not_found # <3>
+ msg = 'not found'
+ else: # <4>
+ raise
+ else:
+ save_flag(image, cc.lower() + '.gif')
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose: # <5>
+ print(cc, msg)
+
+ return Result(status, cc) # <6>
+# END FLAGS2_BASIC_HTTP_FUNCTIONS
+
+# BEGIN FLAGS2_DOWNLOAD_MANY_SEQUENTIAL
+def download_many(cc_list, base_url, verbose, max_req):
+ counter = collections.Counter() # <1>
+ cc_iter = sorted(cc_list) # <2>
+ if not verbose:
+ cc_iter = tqdm.tqdm(cc_iter) # <3>
+ for cc in cc_iter: # <4>
+ try:
+ res = download_one(cc, base_url, verbose) # <5>
+ except requests.exceptions.HTTPError as exc: # <6>
+ error_msg = 'HTTP error {res.status_code} - {res.reason}'
+ error_msg = error_msg.format(res=exc.response)
+ except requests.exceptions.ConnectionError as exc: # <7>
+ error_msg = 'Connection error'
+ else: # <8>
+ error_msg = ''
+ status = res.status
+
+ if error_msg:
+ status = HTTPStatus.error # <9>
+ counter[status] += 1 # <10>
+ if verbose and error_msg: # <11>
+ print('*** Error for {}: {}'.format(cc, error_msg))
+
+ return counter # <12>
+# END FLAGS2_DOWNLOAD_MANY_SEQUENTIAL
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
diff --git a/17-futures-py3.7/countries/flags2_threadpool.py b/17-futures-py3.7/countries/flags2_threadpool.py
new file mode 100644
index 0000000..069d4ff
--- /dev/null
+++ b/17-futures-py3.7/countries/flags2_threadpool.py
@@ -0,0 +1,68 @@
+"""Download flags of countries (with error handling).
+
+ThreadPool version
+
+Sample run::
+
+ $ python3 flags2_threadpool.py -s ERROR -e
+ ERROR site: http://localhost:8003/flags
+ Searching for 676 flags: from AA to ZZ
+ 30 concurrent connections will be used.
+ --------------------
+ 150 flags downloaded.
+ 361 not found.
+ 165 errors.
+ Elapsed time: 7.46s
+
+"""
+
+# BEGIN FLAGS2_THREADPOOL
+import collections
+from concurrent import futures
+
+import requests
+import tqdm # <1>
+
+from flags2_common import main, HTTPStatus # <2>
+from flags2_sequential import download_one # <3>
+
+DEFAULT_CONCUR_REQ = 30 # <4>
+MAX_CONCUR_REQ = 1000 # <5>
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ counter = collections.Counter()
+ with futures.ThreadPoolExecutor(max_workers=concur_req) as executor: # <6>
+ to_do_map = {} # <7>
+ for cc in sorted(cc_list): # <8>
+ future = executor.submit(download_one,
+ cc, base_url, verbose) # <9>
+ to_do_map[future] = cc # <10>
+ done_iter = futures.as_completed(to_do_map) # <11>
+ if not verbose:
+ done_iter = tqdm.tqdm(done_iter, total=len(cc_list)) # <12>
+ for future in done_iter: # <13>
+ try:
+ res = future.result() # <14>
+ except requests.exceptions.HTTPError as exc: # <15>
+ error_msg = 'HTTP {res.status_code} - {res.reason}'
+ error_msg = error_msg.format(res=exc.response)
+ except requests.exceptions.ConnectionError as exc:
+ error_msg = 'Connection error'
+ else:
+ error_msg = ''
+ status = res.status
+
+ if error_msg:
+ status = HTTPStatus.error
+ counter[status] += 1
+ if verbose and error_msg:
+ cc = to_do_map[future] # <16>
+ print('*** Error for {}: {}'.format(cc, error_msg))
+
+ return counter
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
+# END FLAGS2_THREADPOOL
diff --git a/17-futures-py3.7/countries/flags_asyncio.py b/17-futures-py3.7/countries/flags_asyncio.py
new file mode 100644
index 0000000..89421f8
--- /dev/null
+++ b/17-futures-py3.7/countries/flags_asyncio.py
@@ -0,0 +1,72 @@
+"""Download flags of top 20 countries by population
+
+asyncio + aiohttp version
+
+Sample run::
+
+ $ python3 flags_asyncio.py
+ CN EG BR IN ID RU NG VN JP DE TR PK FR ET MX PH US IR CD BD
+ 20 flags downloaded in 0.35s
+
+"""
+# BEGIN FLAGS_ASYNCIO
+import os
+import time
+import sys
+import asyncio # <1>
+
+import aiohttp # <2>
+
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split()
+
+BASE_URL = 'http://flupy.org/data/flags'
+
+DEST_DIR = 'downloads/'
+
+
+def save_flag(img, filename):
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+async def get_flag(session, cc): # <3>
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ async with session.get(url) as resp: # <4>
+ return await resp.read() # <5>
+
+
+def show(text):
+ print(text, end=' ')
+ sys.stdout.flush()
+
+
+async def download_one(session, cc): # <6>
+ image = await get_flag(session, cc) # <7>
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+async def download_many(cc_list):
+ async with aiohttp.ClientSession() as session: # <8>
+ res = await asyncio.gather( # <9>
+ *[asyncio.create_task(download_one(session, cc))
+ for cc in sorted(cc_list)])
+
+ return len(res)
+
+
+def main(): # <10>
+ t0 = time.time()
+ count = asyncio.run(download_many(POP20_CC))
+ elapsed = time.time() - t0
+ msg = '\n{} flags downloaded in {:.2f}s'
+ print(msg.format(count, elapsed))
+
+
+if __name__ == '__main__':
+ main()
+# END FLAGS_ASYNCIO
diff --git a/17-futures-py3.7/countries/flags_threadpool.py b/17-futures-py3.7/countries/flags_threadpool.py
new file mode 100644
index 0000000..47a5ee6
--- /dev/null
+++ b/17-futures-py3.7/countries/flags_threadpool.py
@@ -0,0 +1,71 @@
+"""Download flags of top 20 countries by population
+
+ThreadPoolExecutor version
+
+Sample run::
+
+ $ python3 flags_threadpool.py
+ DE FR BD CN EG RU IN TR VN ID JP BR NG MX PK ET PH CD US IR
+ 20 flags downloaded in 0.35s
+
+"""
+# BEGIN FLAGS_THREADPOOL
+import os
+import time
+import sys
+from concurrent import futures # <1>
+
+import requests
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split()
+
+BASE_URL = 'http://flupy.org/data/flags'
+
+DEST_DIR = 'downloads/'
+
+MAX_WORKERS = 20 # <2>
+
+def save_flag(img, filename):
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+def get_flag(cc):
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ resp = requests.get(url)
+ return resp.content
+
+
+def show(text):
+ print(text, end=' ')
+ sys.stdout.flush()
+
+
+def download_one(cc): # <3>
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+def download_many(cc_list):
+ workers = min(MAX_WORKERS, len(cc_list)) # <4>
+ with futures.ThreadPoolExecutor(workers) as executor: # <5>
+ res = executor.map(download_one, sorted(cc_list)) # <6>
+
+ return len(list(res)) # <7>
+
+
+def main(): # <10>
+ t0 = time.time()
+ count = download_many(POP20_CC)
+ elapsed = time.time() - t0
+ msg = '\n{} flags downloaded in {:.2f}s'
+ print(msg.format(count, elapsed))
+
+
+if __name__ == '__main__':
+ main()
+# END FLAGS_THREADPOOL
diff --git a/17-futures-py3.7/countries/requirements.txt b/17-futures-py3.7/countries/requirements.txt
new file mode 100644
index 0000000..aa7a6de
--- /dev/null
+++ b/17-futures-py3.7/countries/requirements.txt
@@ -0,0 +1,2 @@
+requests==2.21.0
+aiohttp==3.5.4
diff --git a/17-futures-py3.7/demo_executor_map.py b/17-futures-py3.7/demo_executor_map.py
new file mode 100644
index 0000000..f3625cf
--- /dev/null
+++ b/17-futures-py3.7/demo_executor_map.py
@@ -0,0 +1,34 @@
+"""
+Experiment with ``ThreadPoolExecutor.map``
+"""
+# BEGIN EXECUTOR_MAP
+from time import sleep, strftime
+from concurrent import futures
+
+
+def display(*args): # <1>
+ print(strftime('[%H:%M:%S]'), end=' ')
+ print(*args)
+
+
+def loiter(n): # <2>
+ msg = '{}loiter({}): doing nothing for {}s...'
+ display(msg.format('\t'*n, n, n))
+ sleep(n)
+ msg = '{}loiter({}): done.'
+ display(msg.format('\t'*n, n))
+ return n * 10 # <3>
+
+
+def main():
+ display('Script starting.')
+ executor = futures.ThreadPoolExecutor(max_workers=3) # <4>
+ results = executor.map(loiter, range(5)) # <5>
+ display('results:', results) # <6>.
+ display('Waiting for individual results:')
+ for i, result in enumerate(results): # <7>
+ display('result {}: {}'.format(i, result))
+
+
+main()
+# END EXECUTOR_MAP
diff --git a/17-futures/README.rst b/17-futures/README.rst
new file mode 100644
index 0000000..194d1ef
--- /dev/null
+++ b/17-futures/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 17 - "Concurrency with futures"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/17-futures/countries/README.rst b/17-futures/countries/README.rst
new file mode 100644
index 0000000..3785ffd
--- /dev/null
+++ b/17-futures/countries/README.rst
@@ -0,0 +1,194 @@
+============================
+Setting up Nginx and Vaurien
+============================
+
+This text explains how to configure Nginx and Vaurien so you can test HTTP client code locally, avoiding real network traffic while still being able to introduce simulated delays and errors. This setup is necessary if you want to experiment with the ``flags2*.py`` image download examples in this directory -- covered in chapters 17 and 18 of Fluent Python.
+
+
+Overview
+========
+
+The flag download examples are designed to compare the performance of different approaches to finding and downloading files from the Web. However, we don't want to hit a public server with multiple requests per second while testing, and we want to be able to simulate high latency and random network errors.
+
+For this setup I chose Nginx as the HTTP server because it is very fast and easy to configure, and the Vaurien proxy because it was designed by Mozilla to introduce delays and network errors for testing Web services.
+
+The archive ``flags.zip`` contains a directory ``flags/`` with 194 subdirectories, each containing a ``.gif`` image and a ``metadata.json`` file. These are public-domain images copied from the `CIA World Fact Book `_.
+
+Once these files are unpacked to the ``flags/`` directory and Nginx is configured, you can experiment with the ``flags2*.py`` examples without hitting the network.
+
+
+Procedure
+=========
+
+1. Unpack test data
+-------------------
+
+The instructions in this section are for GNU/Linux or OS X using the command line. Windows users should have no difficulty doing the same operations with the Windows Explorer GUI.
+
+Unpack the initial data in the ``countries/`` directory::
+
+ $ unzip flags.zip
+ ... many lines omitted ...
+ creating: flags/zw/
+ inflating: flags/zw/metadata.json
+ inflating: flags/zw/zw.gif
+
+
+Verify that 194 directories are created in ``countries/flags/``, each with a ``.gif`` and a ``metadata.json`` file::
+
+
+ $ ls flags | wc -w
+ 194
+ $ find flags | grep .gif | wc -l
+ 194
+ $ find flags | grep .json | wc -l
+ 194
+ $ ls flags/ad
+ ad.gif metadata.json
+
+
+2. Install Nginx
+----------------
+
+Download and install Nginx. I used version 1.6.2 -- the latest stable version as I write this.
+
+* Download page: http://nginx.org/en/download.html
+
+* Beginner's guide: http://nginx.org/en/docs/beginners_guide.html
+
+
+3. Configure Nginx
+------------------
+
+Edit the ``nginx.conf`` file to set the port and document root. You can determine which ``nginx.conf`` is in use by running::
+
+
+ $ nginx -V
+
+
+The output starts with::
+
+ nginx version: nginx/1.6.2
+ built by clang 6.0 (clang-600.0.51) (based on LLVM 3.5svn)
+ TLS SNI support enabled
+ configure arguments:...
+
+
+Among the configure arguments you'll see ``--conf-path=``. That's the file you will edit.
+
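+If you'd rather not scan the whole output, a one-liner like the following can extract just the configuration path (a convenience only, assuming a Unix-like shell; ``nginx -V`` writes to standard error, hence the ``2>&1`` redirection)::
+
+    $ nginx -V 2>&1 | grep -o 'conf-path=[^ ]*'
+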
+Most of the content in ``nginx.conf`` is within a block labeled ``http`` and enclosed in curly braces. Within that block there can be multiple blocks labeled ``server``. Add another ``server`` block like this one::
+
+
+ server {
+ listen 8001;
+
+ location /flags/ {
+ root /full-path-to.../countries/;
+ }
+ }
+
+
+After editing ``nginx.conf``, the server must be started (if it's not running) or told to reload the configuration file::
+
+
+ $ nginx # to start, if necessary
+ $ nginx -s reload # to reload the configuration
+
+
+To test the configuration, open the URL http://localhost:8001/flags/ad/ad.gif in a browser. You should see the blue, yellow and red flag of Andorra.
+
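+You can run the same check from the command line, assuming ``curl`` is available -- a ``200`` status in the response headers means Nginx is serving the flag images::
+
+    $ curl -I http://localhost:8001/flags/ad/ad.gif
+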
+If the test fails, please double check the procedure just described and refer to the Nginx documentation.
+
+At this point you may run the ``flags2*.py`` examples against the Nginx install by providing the ``--server LOCAL`` command line option. For example::
+
+
+ $ python3 flags2_threadpool.py -s LOCAL
+ LOCAL site: http://localhost:8001/flags
+ Searching for 20 flags: from BD to VN
+ 20 concurrent connections will be used.
+ --------------------
+ 20 flags downloaded.
+ Elapsed time: 0.09s
+
+
+Note that Nginx is so fast that you will not see much difference in run time between the sequential and the concurrent versions. For more realistic testing with simulated network lag, we need to set up the Vaurien proxy.
+
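+For example, the sequential script hitting the same local server also finishes almost instantly::
+
+    $ python3 flags2_sequential.py -s LOCAL
+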
+
+4. Install and run Vaurien
+--------------------------
+
+Vaurien depends on gevent, which is only available for Python 2.5-2.7. To install Vaurien I opened another shell, created a virtualenv for Python 2.7, and used that environment to install and run the ``vaurien`` package::
+
+
+ $ virtualenv-2.7 .env27 --no-site-packages --distribute
+ New python executable in .env27/bin/python
+ Installing setuptools, pip...done.
+ $ . .env27/bin/activate
+ (.env27)$ pip install vaurien
+ Downloading/unpacking vaurien
+ Downloading vaurien-1.9.tar.gz (50kB): 50kB downloaded
+ ...many lines and a few minutes later...
+
+ Successfully installed vaurien cornice gevent statsd-client vaurienclient
+ greenlet http-parser pyramid simplejson requests zope.interface
+ translationstring PasteDeploy WebOb repoze.lru zope.deprecation venusian
+ Cleaning up...
+
+
+Using that same shell with the ``.env27`` environment active, run the ``vaurien_delay.sh`` script in the ``countries/`` directory::
+
+
+ (.env27)$ ./vaurien_delay.sh
+ 2015-02-25 20:20:17 [69124] [INFO] Starting the Chaos TCP Server
+ 2015-02-25 20:20:17 [69124] [INFO] Options:
+ 2015-02-25 20:20:17 [69124] [INFO] * proxies from localhost:8002 to localhost:8001
+ 2015-02-25 20:20:17 [69124] [INFO] * timeout: 30
+ 2015-02-25 20:20:17 [69124] [INFO] * stay_connected: 0
+ 2015-02-25 20:20:17 [69124] [INFO] * pool_max_size: 100
+ 2015-02-25 20:20:17 [69124] [INFO] * pool_timeout: 30
+ 2015-02-25 20:20:17 [69124] [INFO] * async_mode: 1
+
+
+The ``vaurien_delay.sh`` script creates an HTTP proxy on port 8002 which adds a .5 s delay to every response. You can test it with a browser on port 8002: http://localhost:8002/flags/ad/ad.gif -- the flag of Andorra should appear after a short delay.
+
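+To measure the delay without a browser, one option (again assuming ``curl`` is available) is to time a request through the proxy and compare it with a request sent directly to Nginx on port 8001::
+
+    $ time curl -s -o /dev/null http://localhost:8002/flags/ad/ad.gif
+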
+There is also the ``vaurien_error_delay.sh`` script, which runs a proxy on port 8003 producing errors in 25% of the responses and adding a .5 s delay to 50% of the responses. You can also test it with the browser on port 8003, but remember that errors are expected.
+
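+A quick way to see the simulated errors without a browser is to hit the error proxy a few times and print only the HTTP status codes (again assuming ``curl``; the mix of ``200`` and error codes varies from run to run)::
+
+    $ for i in 1 2 3 4; do curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8003/flags/ad/ad.gif; done
+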
+
+Platform-specific instructions
+==============================
+
+
+Nginx setup on Mac OS X
+------------------------
+
+Homebrew (copy & paste code at the bottom of http://brew.sh/)::
+
+
+ $ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ $ brew doctor
+ $ brew install nginx
+
+
+After installing, Homebrew reports the document root and the location of the configuration file::
+
+    Docroot is: /usr/local/var/www
+    /usr/local/etc/nginx/nginx.conf
+
+
+::
+
+ To have launchd start nginx at login:
+ ln -sfv /usr/local/opt/nginx/*.plist ~/Library/LaunchAgents
+ Then to load nginx now:
+ launchctl load ~/Library/LaunchAgents/homebrew.mxcl.nginx.plist
+ Or, if you don't want/need launchctl, you can just run:
+ nginx
+
+
+
+Nginx setup on Lubuntu 14.04.1 LTS
+----------------------------------
+
+Docroot is: /usr/share/nginx/html
+
+
diff --git a/17-futures/countries/country_codes.txt b/17-futures/countries/country_codes.txt
new file mode 100644
index 0000000..72c37f0
--- /dev/null
+++ b/17-futures/countries/country_codes.txt
@@ -0,0 +1,8 @@
+AD AE AF AG AL AM AO AR AT AU AZ BA BB BD BE BF BG BH BI BJ BN BO BR BS BT
+BW BY BZ CA CD CF CG CH CI CL CM CN CO CR CU CV CY CZ DE DJ DK DM DZ EC EE
+EG ER ES ET FI FJ FM FR GA GB GD GE GH GM GN GQ GR GT GW GY HN HR HT HU ID
+IE IL IN IQ IR IS IT JM JO JP KE KG KH KI KM KN KP KR KW KZ LA LB LC LI LK
+LR LS LT LU LV LY MA MC MD ME MG MH MK ML MM MN MR MT MU MV MW MX MY MZ NA
+NE NG NI NL NO NP NR NZ OM PA PE PG PH PK PL PT PW PY QA RO RS RU RW SA SB
+SC SD SE SG SI SK SL SM SN SO SR SS ST SV SY SZ TD TG TH TJ TL TM TN TO TR
+TT TV TW TZ UA UG US UY UZ VA VC VE VN VU WS YE ZA ZM ZW
diff --git a/17-futures/countries/flags.py b/17-futures/countries/flags.py
new file mode 100644
index 0000000..7731bc4
--- /dev/null
+++ b/17-futures/countries/flags.py
@@ -0,0 +1,63 @@
+"""Download flags of top 20 countries by population
+
+Sequential version
+
+Sample run::
+
+ $ python3 flags.py
+ BD BR CD CN DE EG ET FR ID IN IR JP MX NG PH PK RU TR US VN
+ 20 flags downloaded in 10.16s
+
+"""
+# BEGIN FLAGS_PY
+import os
+import time
+import sys
+
+import requests # <1>
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split() # <2>
+
+BASE_URL = 'http://flupy.org/data/flags' # <3>
+
+DEST_DIR = 'downloads/' # <4>
+
+
+def save_flag(img, filename): # <5>
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+def get_flag(cc): # <6>
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ resp = requests.get(url)
+ return resp.content
+
+
+def show(text): # <7>
+ print(text, end=' ')
+ sys.stdout.flush()
+
+
+def download_many(cc_list): # <8>
+ for cc in sorted(cc_list): # <9>
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+
+ return len(cc_list)
+
+
+def main(download_many): # <10>
+ t0 = time.time()
+ count = download_many(POP20_CC)
+ elapsed = time.time() - t0
+ msg = '\n{} flags downloaded in {:.2f}s'
+ print(msg.format(count, elapsed))
+
+
+if __name__ == '__main__':
+ main(download_many) # <11>
+# END FLAGS_PY
diff --git a/17-futures/countries/flags.zip b/17-futures/countries/flags.zip
new file mode 100644
index 0000000..671b5a0
Binary files /dev/null and b/17-futures/countries/flags.zip differ
diff --git a/17-futures/countries/flags2_asyncio.py b/17-futures/countries/flags2_asyncio.py
new file mode 100644
index 0000000..d3c78f8
--- /dev/null
+++ b/17-futures/countries/flags2_asyncio.py
@@ -0,0 +1,120 @@
+"""Download flags of countries (with error handling).
+
+asyncio yield-from version
+
+Sample run::
+
+ $ python3 flags2_asyncio.py -s ERROR -e -m 200
+ ERROR site: http://localhost:8003/flags
+ Searching for 676 flags: from AA to ZZ
+ 200 concurrent connections will be used.
+ --------------------
+ 146 flags downloaded.
+ 363 not found.
+ 167 errors.
+ Elapsed time: 2.59s
+
+"""
+# BEGIN FLAGS2_ASYNCIO_TOP
+import asyncio
+import collections
+import contextlib
+
+import aiohttp
+from aiohttp import web
+import tqdm
+
+from flags2_common import main, HTTPStatus, Result, save_flag
+
+# default set low to avoid errors from remote site, such as
+# 503 - Service Temporarily Unavailable
+DEFAULT_CONCUR_REQ = 5
+MAX_CONCUR_REQ = 1000
+
+
+class FetchError(Exception): # <1>
+ def __init__(self, country_code):
+ self.country_code = country_code
+
+
+@asyncio.coroutine
+def get_flag(base_url, cc): # <2>
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ resp = yield from aiohttp.request('GET', url)
+ with contextlib.closing(resp):
+ if resp.status == 200:
+ image = yield from resp.read()
+ return image
+ elif resp.status == 404:
+ raise web.HTTPNotFound()
+ else:
+ raise aiohttp.HttpProcessingError(
+ code=resp.status, message=resp.reason,
+ headers=resp.headers)
+
+
+@asyncio.coroutine
+def download_one(cc, base_url, semaphore, verbose): # <3>
+ try:
+ with (yield from semaphore): # <4>
+ image = yield from get_flag(base_url, cc) # <5>
+ except web.HTTPNotFound: # <6>
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ except Exception as exc:
+ raise FetchError(cc) from exc # <7>
+ else:
+ save_flag(image, cc.lower() + '.gif') # <8>
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose and msg:
+ print(cc, msg)
+
+ return Result(status, cc)
+# END FLAGS2_ASYNCIO_TOP
+
+# BEGIN FLAGS2_ASYNCIO_DOWNLOAD_MANY
+@asyncio.coroutine
+def downloader_coro(cc_list, base_url, verbose, concur_req): # <1>
+ counter = collections.Counter()
+ semaphore = asyncio.Semaphore(concur_req) # <2>
+ to_do = [download_one(cc, base_url, semaphore, verbose)
+ for cc in sorted(cc_list)] # <3>
+
+ to_do_iter = asyncio.as_completed(to_do) # <4>
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list)) # <5>
+ for future in to_do_iter: # <6>
+ try:
+ res = yield from future # <7>
+ except FetchError as exc: # <8>
+ country_code = exc.country_code # <9>
+ try:
+ error_msg = exc.__cause__.args[0] # <10>
+ except IndexError:
+ error_msg = exc.__cause__.__class__.__name__ # <11>
+ if verbose and error_msg:
+ msg = '*** Error for {}: {}'
+ print(msg.format(country_code, error_msg))
+ status = HTTPStatus.error
+ else:
+ status = res.status
+
+ counter[status] += 1 # <12>
+
+ return counter # <13>
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ loop = asyncio.get_event_loop()
+ coro = downloader_coro(cc_list, base_url, verbose, concur_req)
+ counts = loop.run_until_complete(coro) # <14>
+ loop.close() # <15>
+
+ return counts
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
+# END FLAGS2_ASYNCIO_DOWNLOAD_MANY
diff --git a/17-futures/countries/flags2_asyncio_executor.py b/17-futures/countries/flags2_asyncio_executor.py
new file mode 100644
index 0000000..53ade4b
--- /dev/null
+++ b/17-futures/countries/flags2_asyncio_executor.py
@@ -0,0 +1,109 @@
+"""Download flags of countries (with error handling).
+
+asyncio version using thread pool to save files
+
+"""
+
+import asyncio
+import collections
+import contextlib
+
+import aiohttp
+from aiohttp import web
+import tqdm
+
+from flags2_common import main, HTTPStatus, Result, save_flag
+
+# default set low to avoid errors from remote site, such as
+# 503 - Service Temporarily Unavailable
+DEFAULT_CONCUR_REQ = 5
+MAX_CONCUR_REQ = 1000
+
+
+class FetchError(Exception):
+ def __init__(self, country_code):
+ self.country_code = country_code
+
+
+@asyncio.coroutine
+def get_flag(base_url, cc):
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ resp = yield from aiohttp.request('GET', url)
+ with contextlib.closing(resp):
+ if resp.status == 200:
+ image = yield from resp.read()
+ return image
+ elif resp.status == 404:
+ raise web.HTTPNotFound()
+ else:
+ raise aiohttp.HttpProcessingError(
+ code=resp.status, message=resp.reason,
+ headers=resp.headers)
+
+
+# BEGIN FLAGS2_ASYNCIO_EXECUTOR
+@asyncio.coroutine
+def download_one(cc, base_url, semaphore, verbose):
+ try:
+ with (yield from semaphore):
+ image = yield from get_flag(base_url, cc)
+ except web.HTTPNotFound:
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ except Exception as exc:
+ raise FetchError(cc) from exc
+ else:
+ loop = asyncio.get_event_loop() # <1>
+ loop.run_in_executor(None, # <2>
+ save_flag, image, cc.lower() + '.gif') # <3>
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose and msg:
+ print(cc, msg)
+
+ return Result(status, cc)
+# END FLAGS2_ASYNCIO_EXECUTOR
+
+@asyncio.coroutine
+def downloader_coro(cc_list, base_url, verbose, concur_req):
+ counter = collections.Counter()
+ semaphore = asyncio.Semaphore(concur_req)
+ to_do = [download_one(cc, base_url, semaphore, verbose)
+ for cc in sorted(cc_list)]
+
+ to_do_iter = asyncio.as_completed(to_do)
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list))
+ for future in to_do_iter:
+ try:
+ res = yield from future
+ except FetchError as exc:
+ country_code = exc.country_code
+ try:
+ error_msg = exc.__cause__.args[0]
+ except IndexError:
+ error_msg = exc.__cause__.__class__.__name__
+ if verbose and error_msg:
+ msg = '*** Error for {}: {}'
+ print(msg.format(country_code, error_msg))
+ status = HTTPStatus.error
+ else:
+ status = res.status
+
+ counter[status] += 1
+
+ return counter
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ loop = asyncio.get_event_loop()
+ coro = downloader_coro(cc_list, base_url, verbose, concur_req)
+ counts = loop.run_until_complete(coro)
+ loop.close()
+
+ return counts
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
diff --git a/17-futures/countries/flags2_await.py b/17-futures/countries/flags2_await.py
new file mode 100644
index 0000000..8443484
--- /dev/null
+++ b/17-futures/countries/flags2_await.py
@@ -0,0 +1,104 @@
+"""Download flags of countries (with error handling).
+
+asyncio async/await version
+
+"""
+# BEGIN FLAGS2_ASYNCIO_TOP
+import asyncio
+import collections
+from contextlib import closing
+
+import aiohttp
+from aiohttp import web
+import tqdm
+
+from flags2_common import main, HTTPStatus, Result, save_flag
+
+# default set low to avoid errors from remote site, such as
+# 503 - Service Temporarily Unavailable
+DEFAULT_CONCUR_REQ = 5
+MAX_CONCUR_REQ = 1000
+
+
+class FetchError(Exception): # <1>
+ def __init__(self, country_code):
+ self.country_code = country_code
+
+
+async def get_flag(base_url, cc): # <2>
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ with closing(await aiohttp.request('GET', url)) as resp:
+ if resp.status == 200:
+ image = await resp.read()
+ return image
+ elif resp.status == 404:
+ raise web.HTTPNotFound()
+ else:
+ raise aiohttp.HttpProcessingError(
+ code=resp.status, message=resp.reason,
+ headers=resp.headers)
+
+
+async def download_one(cc, base_url, semaphore, verbose): # <3>
+ try:
+ with (await semaphore): # <4>
+ image = await get_flag(base_url, cc) # <5>
+ except web.HTTPNotFound: # <6>
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ except Exception as exc:
+ raise FetchError(cc) from exc # <7>
+ else:
+ save_flag(image, cc.lower() + '.gif') # <8>
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose and msg:
+ print(cc, msg)
+
+ return Result(status, cc)
+# END FLAGS2_ASYNCIO_TOP
+
+# BEGIN FLAGS2_ASYNCIO_DOWNLOAD_MANY
+async def downloader_coro(cc_list, base_url, verbose, concur_req): # <1>
+ counter = collections.Counter()
+ semaphore = asyncio.Semaphore(concur_req) # <2>
+ to_do = [download_one(cc, base_url, semaphore, verbose)
+ for cc in sorted(cc_list)] # <3>
+
+ to_do_iter = asyncio.as_completed(to_do) # <4>
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list)) # <5>
+ for future in to_do_iter: # <6>
+ try:
+ res = await future # <7>
+ except FetchError as exc: # <8>
+ country_code = exc.country_code # <9>
+ try:
+ error_msg = exc.__cause__.args[0] # <10>
+ except IndexError:
+ error_msg = exc.__cause__.__class__.__name__ # <11>
+ if verbose and error_msg:
+ msg = '*** Error for {}: {}'
+ print(msg.format(country_code, error_msg))
+ status = HTTPStatus.error
+ else:
+ status = res.status
+
+ counter[status] += 1 # <12>
+
+ return counter # <13>
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ loop = asyncio.get_event_loop()
+ coro = downloader_coro(cc_list, base_url, verbose, concur_req)
+ counts = loop.run_until_complete(coro) # <14>
+ loop.close() # <15>
+
+ return counts
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
+# END FLAGS2_ASYNCIO_DOWNLOAD_MANY
diff --git a/17-futures/countries/flags2_common.py b/17-futures/countries/flags2_common.py
new file mode 100644
index 0000000..bfa40fb
--- /dev/null
+++ b/17-futures/countries/flags2_common.py
@@ -0,0 +1,149 @@
+"""Utilities for second set of flag examples.
+"""
+
+import os
+import time
+import sys
+import string
+import argparse
+from collections import namedtuple
+from enum import Enum
+
+
+Result = namedtuple('Result', 'status data')
+
+HTTPStatus = Enum('Status', 'ok not_found error')
+
+POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
+ 'MX PH VN ET EG DE IR TR CD FR').split()
+
+DEFAULT_CONCUR_REQ = 1
+MAX_CONCUR_REQ = 1
+
+SERVERS = {
+ 'REMOTE': 'http://flupy.org/data/flags',
+ 'LOCAL': 'http://localhost:8001/flags',
+ 'DELAY': 'http://localhost:8002/flags',
+ 'ERROR': 'http://localhost:8003/flags',
+}
+DEFAULT_SERVER = 'LOCAL'
+
+DEST_DIR = 'downloads/'
+COUNTRY_CODES_FILE = 'country_codes.txt'
+
+
+def save_flag(img, filename):
+ path = os.path.join(DEST_DIR, filename)
+ with open(path, 'wb') as fp:
+ fp.write(img)
+
+
+def initial_report(cc_list, actual_req, server_label):
+ if len(cc_list) <= 10:
+ cc_msg = ', '.join(cc_list)
+ else:
+ cc_msg = 'from {} to {}'.format(cc_list[0], cc_list[-1])
+ print('{} site: {}'.format(server_label, SERVERS[server_label]))
+ msg = 'Searching for {} flag{}: {}'
+ plural = 's' if len(cc_list) != 1 else ''
+ print(msg.format(len(cc_list), plural, cc_msg))
+ plural = 's' if actual_req != 1 else ''
+ msg = '{} concurrent connection{} will be used.'
+ print(msg.format(actual_req, plural))
+
+
+def final_report(cc_list, counter, start_time):
+ elapsed = time.time() - start_time
+ print('-' * 20)
+ msg = '{} flag{} downloaded.'
+ plural = 's' if counter[HTTPStatus.ok] != 1 else ''
+ print(msg.format(counter[HTTPStatus.ok], plural))
+ if counter[HTTPStatus.not_found]:
+ print(counter[HTTPStatus.not_found], 'not found.')
+ if counter[HTTPStatus.error]:
+ plural = 's' if counter[HTTPStatus.error] != 1 else ''
+ print('{} error{}.'.format(counter[HTTPStatus.error], plural))
+ print('Elapsed time: {:.2f}s'.format(elapsed))
+
+
+def expand_cc_args(every_cc, all_cc, cc_args, limit):
+ codes = set()
+ A_Z = string.ascii_uppercase
+ if every_cc:
+ codes.update(a+b for a in A_Z for b in A_Z)
+ elif all_cc:
+ with open(COUNTRY_CODES_FILE) as fp:
+ text = fp.read()
+ codes.update(text.split())
+ else:
+ for cc in (c.upper() for c in cc_args):
+ if len(cc) == 1 and cc in A_Z:
+ codes.update(cc+c for c in A_Z)
+ elif len(cc) == 2 and all(c in A_Z for c in cc):
+ codes.add(cc)
+ else:
+ msg = 'each CC argument must be A to Z or AA to ZZ.'
+ raise ValueError('*** Usage error: '+msg)
+ return sorted(codes)[:limit]
+
+
+def process_args(default_concur_req):
+ server_options = ', '.join(sorted(SERVERS))
+ parser = argparse.ArgumentParser(
+ description='Download flags for country codes. '
+ 'Default: top 20 countries by population.')
+ parser.add_argument('cc', metavar='CC', nargs='*',
+ help='country code or 1st letter (eg. B for BA...BZ)')
+ parser.add_argument('-a', '--all', action='store_true',
+ help='get all available flags (AD to ZW)')
+ parser.add_argument('-e', '--every', action='store_true',
+ help='get flags for every possible code (AA...ZZ)')
+ parser.add_argument('-l', '--limit', metavar='N', type=int,
+ help='limit to N first codes', default=sys.maxsize)
+ parser.add_argument('-m', '--max_req', metavar='CONCURRENT', type=int,
+ default=default_concur_req,
+ help='maximum concurrent requests (default={})'
+ .format(default_concur_req))
+ parser.add_argument('-s', '--server', metavar='LABEL',
+ default=DEFAULT_SERVER,
+ help='Server to hit; one of {} (default={})'
+ .format(server_options, DEFAULT_SERVER))
+ parser.add_argument('-v', '--verbose', action='store_true',
+ help='output detailed progress info')
+ args = parser.parse_args()
+ if args.max_req < 1:
+ print('*** Usage error: --max_req CONCURRENT must be >= 1')
+ parser.print_usage()
+ sys.exit(1)
+ if args.limit < 1:
+ print('*** Usage error: --limit N must be >= 1')
+ parser.print_usage()
+ sys.exit(1)
+ args.server = args.server.upper()
+ if args.server not in SERVERS:
+ print('*** Usage error: --server LABEL must be one of',
+ server_options)
+ parser.print_usage()
+ sys.exit(1)
+ try:
+ cc_list = expand_cc_args(args.every, args.all, args.cc, args.limit)
+ except ValueError as exc:
+ print(exc.args[0])
+ parser.print_usage()
+ sys.exit(1)
+
+ if not cc_list:
+ cc_list = sorted(POP20_CC)
+ return args, cc_list
+
+
+def main(download_many, default_concur_req, max_concur_req):
+ args, cc_list = process_args(default_concur_req)
+ actual_req = min(args.max_req, max_concur_req, len(cc_list))
+ initial_report(cc_list, actual_req, args.server)
+ base_url = SERVERS[args.server]
+ t0 = time.time()
+ counter = download_many(cc_list, base_url, args.verbose, actual_req)
+ assert sum(counter.values()) == len(cc_list), \
+ 'some downloads are unaccounted for'
+ final_report(cc_list, counter, t0)
diff --git a/17-futures/countries/flags2_sequential.py b/17-futures/countries/flags2_sequential.py
new file mode 100644
index 0000000..65a7e43
--- /dev/null
+++ b/17-futures/countries/flags2_sequential.py
@@ -0,0 +1,87 @@
+"""Download flags of countries (with error handling).
+
+Sequential version
+
+Sample run::
+
+ $ python3 flags2_sequential.py -s DELAY b
+ DELAY site: http://localhost:8002/flags
+ Searching for 26 flags: from BA to BZ
+ 1 concurrent connection will be used.
+ --------------------
+ 17 flags downloaded.
+ 9 not found.
+ Elapsed time: 13.36s
+
+"""
+
+import collections
+
+import requests
+import tqdm
+
+from flags2_common import main, save_flag, HTTPStatus, Result
+
+
+DEFAULT_CONCUR_REQ = 1
+MAX_CONCUR_REQ = 1
+
+# BEGIN FLAGS2_BASIC_HTTP_FUNCTIONS
+def get_flag(base_url, cc):
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ resp = requests.get(url)
+ if resp.status_code != 200: # <1>
+ resp.raise_for_status()
+ return resp.content
+
+
+def download_one(cc, base_url, verbose=False):
+ try:
+ image = get_flag(base_url, cc)
+ except requests.exceptions.HTTPError as exc: # <2>
+ res = exc.response
+ if res.status_code == 404:
+ status = HTTPStatus.not_found # <3>
+ msg = 'not found'
+ else: # <4>
+ raise
+ else:
+ save_flag(image, cc.lower() + '.gif')
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose: # <5>
+ print(cc, msg)
+
+ return Result(status, cc) # <6>
+# END FLAGS2_BASIC_HTTP_FUNCTIONS
+
+# BEGIN FLAGS2_DOWNLOAD_MANY_SEQUENTIAL
+def download_many(cc_list, base_url, verbose, max_req):
+ counter = collections.Counter() # <1>
+ cc_iter = sorted(cc_list) # <2>
+ if not verbose:
+ cc_iter = tqdm.tqdm(cc_iter) # <3>
+ for cc in cc_iter: # <4>
+ try:
+ res = download_one(cc, base_url, verbose) # <5>
+ except requests.exceptions.HTTPError as exc: # <6>
+ error_msg = 'HTTP error {res.status_code} - {res.reason}'
+ error_msg = error_msg.format(res=exc.response)
+ except requests.exceptions.ConnectionError as exc: # <7>
+ error_msg = 'Connection error'
+ else: # <8>
+ error_msg = ''
+ status = res.status
+
+ if error_msg:
+ status = HTTPStatus.error # <9>
+ counter[status] += 1 # <10>
+ if verbose and error_msg: # <11>
+ print('*** Error for {}: {}'.format(cc, error_msg))
+
+ return counter # <12>
+# END FLAGS2_DOWNLOAD_MANY_SEQUENTIAL
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
diff --git a/17-futures/countries/flags2_threadpool.py b/17-futures/countries/flags2_threadpool.py
new file mode 100644
index 0000000..069d4ff
--- /dev/null
+++ b/17-futures/countries/flags2_threadpool.py
@@ -0,0 +1,68 @@
+"""Download flags of countries (with error handling).
+
+ThreadPool version
+
+Sample run::
+
+ $ python3 flags2_threadpool.py -s ERROR -e
+ ERROR site: http://localhost:8003/flags
+ Searching for 676 flags: from AA to ZZ
+ 30 concurrent connections will be used.
+ --------------------
+ 150 flags downloaded.
+ 361 not found.
+ 165 errors.
+ Elapsed time: 7.46s
+
+"""
+
+# BEGIN FLAGS2_THREADPOOL
+import collections
+from concurrent import futures
+
+import requests
+import tqdm # <1>
+
+from flags2_common import main, HTTPStatus # <2>
+from flags2_sequential import download_one # <3>
+
+DEFAULT_CONCUR_REQ = 30 # <4>
+MAX_CONCUR_REQ = 1000 # <5>
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ counter = collections.Counter()
+ with futures.ThreadPoolExecutor(max_workers=concur_req) as executor: # <6>
+ to_do_map = {} # <7>
+ for cc in sorted(cc_list): # <8>
+ future = executor.submit(download_one,
+ cc, base_url, verbose) # <9>
+ to_do_map[future] = cc # <10>
+ done_iter = futures.as_completed(to_do_map) # <11>
+ if not verbose:
+ done_iter = tqdm.tqdm(done_iter, total=len(cc_list)) # <12>
+ for future in done_iter: # <13>
+ try:
+ res = future.result() # <14>
+ except requests.exceptions.HTTPError as exc: # <15>
+ error_msg = 'HTTP {res.status_code} - {res.reason}'
+ error_msg = error_msg.format(res=exc.response)
+ except requests.exceptions.ConnectionError as exc:
+ error_msg = 'Connection error'
+ else:
+ error_msg = ''
+ status = res.status
+
+ if error_msg:
+ status = HTTPStatus.error
+ counter[status] += 1
+ if verbose and error_msg:
+ cc = to_do_map[future] # <16>
+ print('*** Error for {}: {}'.format(cc, error_msg))
+
+ return counter
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
+# END FLAGS2_THREADPOOL
diff --git a/17-futures/countries/flags3_asyncio.py b/17-futures/countries/flags3_asyncio.py
new file mode 100644
index 0000000..3340367
--- /dev/null
+++ b/17-futures/countries/flags3_asyncio.py
@@ -0,0 +1,125 @@
+"""Download flags and names of countries.
+
+asyncio version
+"""
+
+import asyncio
+import collections
+
+import aiohttp
+from aiohttp import web
+import tqdm
+
+from flags2_common import main, HTTPStatus, Result, save_flag
+
+# default set low to avoid errors from remote site, such as
+# 503 - Service Temporarily Unavailable
+DEFAULT_CONCUR_REQ = 5
+MAX_CONCUR_REQ = 1000
+
+
+class FetchError(Exception):
+ def __init__(self, country_code):
+ self.country_code = country_code
+
+# BEGIN FLAGS3_ASYNCIO
+@asyncio.coroutine
+def http_get(url):
+ res = yield from aiohttp.request('GET', url)
+ if res.status == 200:
+ ctype = res.headers.get('Content-type', '').lower()
+ if 'json' in ctype or url.endswith('json'):
+ data = yield from res.json() # <1>
+ else:
+ data = yield from res.read() # <2>
+ return data
+
+ elif res.status == 404:
+ raise web.HTTPNotFound()
+ else:
+ raise aiohttp.errors.HttpProcessingError(
+ code=res.status, message=res.reason,
+ headers=res.headers)
+
+
+@asyncio.coroutine
+def get_country(base_url, cc):
+ url = '{}/{cc}/metadata.json'.format(base_url, cc=cc.lower())
+ metadata = yield from http_get(url) # <3>
+ return metadata['country']
+
+
+@asyncio.coroutine
+def get_flag(base_url, cc):
+ url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
+ return (yield from http_get(url)) # <4>
+
+
+@asyncio.coroutine
+def download_one(cc, base_url, semaphore, verbose):
+ try:
+ with (yield from semaphore): # <5>
+ image = yield from get_flag(base_url, cc)
+ with (yield from semaphore):
+ country = yield from get_country(base_url, cc)
+ except web.HTTPNotFound:
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ except Exception as exc:
+ raise FetchError(cc) from exc
+ else:
+ country = country.replace(' ', '_')
+ filename = '{}-{}.gif'.format(country, cc)
+ loop = asyncio.get_event_loop()
+ loop.run_in_executor(None, save_flag, image, filename)
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose and msg:
+ print(cc, msg)
+
+ return Result(status, cc)
+# END FLAGS3_ASYNCIO
+
+@asyncio.coroutine
+def downloader_coro(cc_list, base_url, verbose, concur_req):
+ counter = collections.Counter()
+ semaphore = asyncio.Semaphore(concur_req)
+ to_do = [download_one(cc, base_url, semaphore, verbose)
+ for cc in sorted(cc_list)]
+
+ to_do_iter = asyncio.as_completed(to_do)
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list))
+ for future in to_do_iter:
+ try:
+ res = yield from future
+ except FetchError as exc:
+ country_code = exc.country_code
+ try:
+ error_msg = exc.__cause__.args[0]
+ except IndexError:
+ error_msg = exc.__cause__.__class__.__name__
+ if verbose and error_msg:
+ msg = '*** Error for {}: {}'
+ print(msg.format(country_code, error_msg))
+ status = HTTPStatus.error
+ else:
+ status = res.status
+
+ counter[status] += 1
+
+ return counter
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ loop = asyncio.get_event_loop()
+ coro = downloader_coro(cc_list, base_url, verbose, concur_req)
+ counts = loop.run_until_complete(coro)
+ loop.close()
+
+ return counts
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
diff --git a/17-futures/countries/flags3_threadpool.py b/17-futures/countries/flags3_threadpool.py
new file mode 100644
index 0000000..e5e4b78
--- /dev/null
+++ b/17-futures/countries/flags3_threadpool.py
@@ -0,0 +1,84 @@
+"""Download flags and names of countries.
+
+ThreadPool version
+"""
+
+import collections
+from concurrent import futures
+
+import requests
+import tqdm
+
+from flags2_common import main, save_flag, HTTPStatus, Result
+from flags2_sequential import get_flag
+
+DEFAULT_CONCUR_REQ = 30
+MAX_CONCUR_REQ = 1000
+
+
+def get_country(base_url, cc):
+ url = '{}/{cc}/metadata.json'.format(base_url, cc=cc.lower())
+ res = requests.get(url)
+ if res.status_code != 200:
+ res.raise_for_status()
+ return res.json()['country']
+
+
+def download_one(cc, base_url, verbose=False):
+ try:
+ image = get_flag(base_url, cc)
+ country = get_country(base_url, cc)
+ except requests.exceptions.HTTPError as exc:
+ res = exc.response
+ if res.status_code == 404:
+ status = HTTPStatus.not_found
+ msg = 'not found'
+ else: # <4>
+ raise
+ else:
+ country = country.replace(' ', '_')
+ save_flag(image, '{}-{}.gif'.format(country, cc))
+ status = HTTPStatus.ok
+ msg = 'OK'
+
+ if verbose:
+ print(cc, msg)
+
+ return Result(status, cc)
+
+
+def download_many(cc_list, base_url, verbose, concur_req):
+ counter = collections.Counter()
+ with futures.ThreadPoolExecutor(concur_req) as executor:
+ to_do_map = {}
+ for cc in sorted(cc_list):
+ future = executor.submit(download_one,
+ cc, base_url, verbose)
+ to_do_map[future] = cc
+ to_do_iter = futures.as_completed(to_do_map)
+ if not verbose:
+ to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list))
+ for future in to_do_iter:
+ try:
+ res = future.result()
+ except requests.exceptions.HTTPError as exc:
+ error_msg = 'HTTP {res.status_code} - {res.reason}'
+ error_msg = error_msg.format(res=exc.response)
+ except requests.exceptions.ConnectionError as exc:
+ error_msg = 'Connection error'
+ else:
+ error_msg = ''
+ status = res.status
+
+ if error_msg:
+ status = HTTPStatus.error
+ counter[status] += 1
+ if verbose and error_msg:
+ cc = to_do_map[future]
+ print('*** Error for {}: {}'.format(cc, error_msg))
+
+ return counter
+
+
+if __name__ == '__main__':
+ main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
diff --git a/17-futures/countries/flags_asyncio.py b/17-futures/countries/flags_asyncio.py
new file mode 100644
index 0000000..394152f
--- /dev/null
+++ b/17-futures/countries/flags_asyncio.py
@@ -0,0 +1,48 @@
+"""Download flags of top 20 countries by population
+
+asyncio + aiohttp version
+
+Sample run::
+
+ $ python3 flags_asyncio.py
+ EG VN IN TR RU ID US DE CN MX JP BD NG ET FR BR PH PK CD IR
+ 20 flags downloaded in 1.07s
+
+"""
+# BEGIN FLAGS_ASYNCIO
+import asyncio
+
+import aiohttp # <1>
+
+from flags import BASE_URL, save_flag, show, main # <2>
+
+
+@asyncio.coroutine # <3>
+def get_flag(cc):
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ resp = yield from aiohttp.request('GET', url) # <4>
+ image = yield from resp.read() # <5>
+ return image
+
+
+@asyncio.coroutine
+def download_one(cc): # <6>
+ image = yield from get_flag(cc) # <7>
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+def download_many(cc_list):
+ loop = asyncio.get_event_loop() # <8>
+ to_do = [download_one(cc) for cc in sorted(cc_list)] # <9>
+ wait_coro = asyncio.wait(to_do) # <10>
+ res, _ = loop.run_until_complete(wait_coro) # <11>
+ loop.close() # <12>
+
+ return len(res)
+
+
+if __name__ == '__main__':
+ main(download_many)
+# END FLAGS_ASYNCIO
diff --git a/17-futures/countries/flags_await.py b/17-futures/countries/flags_await.py
new file mode 100644
index 0000000..b02dde9
--- /dev/null
+++ b/17-futures/countries/flags_await.py
@@ -0,0 +1,46 @@
+"""Download flags of top 20 countries by population
+
+asyncio + aiohttp version
+
+Sample run::
+
+ $ python3 flags_await.py
+ EG VN IN TR RU ID US DE CN MX JP BD NG ET FR BR PH PK CD IR
+ 20 flags downloaded in 1.07s
+
+"""
+# BEGIN FLAGS_ASYNCIO
+import asyncio
+
+import aiohttp # <1>
+
+from flags import BASE_URL, save_flag, show, main # <2>
+
+
+async def get_flag(cc): # <3>
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ resp = await aiohttp.request('GET', url) # <4>
+ image = await resp.read() # <5>
+ return image
+
+
+async def download_one(cc): # <6>
+ image = await get_flag(cc) # <7>
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+def download_many(cc_list):
+ loop = asyncio.get_event_loop() # <8>
+ to_do = [download_one(cc) for cc in sorted(cc_list)] # <9>
+ wait_coro = asyncio.wait(to_do) # <10>
+ res, _ = loop.run_until_complete(wait_coro) # <11>
+ loop.close() # <12>
+
+ return len(res)
+
+
+if __name__ == '__main__':
+ main(download_many)
+# END FLAGS_ASYNCIO
diff --git a/17-futures/countries/flags_threadpool.py b/17-futures/countries/flags_threadpool.py
new file mode 100644
index 0000000..eae931e
--- /dev/null
+++ b/17-futures/countries/flags_threadpool.py
@@ -0,0 +1,43 @@
+"""Download flags of top 20 countries by population
+
+ThreadPoolExecutor version
+
+Sample run::
+
+ $ python3 flags_threadpool.py
+ BD retrieved.
+ EG retrieved.
+ CN retrieved.
+ ...
+ PH retrieved.
+ US retrieved.
+ IR retrieved.
+ 20 flags downloaded in 0.93s
+
+"""
+# BEGIN FLAGS_THREADPOOL
+from concurrent import futures
+
+from flags import save_flag, get_flag, show, main # <1>
+
+MAX_WORKERS = 20 # <2>
+
+
+def download_one(cc): # <3>
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+def download_many(cc_list):
+ workers = min(MAX_WORKERS, len(cc_list)) # <4>
+ with futures.ThreadPoolExecutor(workers) as executor: # <5>
+ res = executor.map(download_one, sorted(cc_list)) # <6>
+
+ return len(list(res)) # <7>
+
+
+if __name__ == '__main__':
+ main(download_many) # <8>
+# END FLAGS_THREADPOOL
diff --git a/17-futures/countries/flags_threadpool_ac.py b/17-futures/countries/flags_threadpool_ac.py
new file mode 100644
index 0000000..d4e3093
--- /dev/null
+++ b/17-futures/countries/flags_threadpool_ac.py
@@ -0,0 +1,55 @@
+"""Download flags of top 20 countries by population
+
+ThreadPoolExecutor version 2, with ``as_completed``.
+
+Sample run::
+
+ $ python3 flags_threadpool.py
+ BD retrieved.
+ EG retrieved.
+ CN retrieved.
+ ...
+ PH retrieved.
+ US retrieved.
+ IR retrieved.
+ 20 flags downloaded in 0.93s
+
+"""
+from concurrent import futures
+
+from flags import save_flag, get_flag, show, main
+
+MAX_WORKERS = 20
+
+
+def download_one(cc):
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+# BEGIN FLAGS_THREADPOOL_AS_COMPLETED
+def download_many(cc_list):
+ cc_list = cc_list[:5] # <1>
+ with futures.ThreadPoolExecutor(max_workers=3) as executor: # <2>
+ to_do = []
+ for cc in sorted(cc_list): # <3>
+ future = executor.submit(download_one, cc) # <4>
+ to_do.append(future) # <5>
+ msg = 'Scheduled for {}: {}'
+ print(msg.format(cc, future)) # <6>
+
+ results = []
+ for future in futures.as_completed(to_do): # <7>
+ res = future.result() # <8>
+ msg = '{} result: {!r}'
+ print(msg.format(future, res)) # <9>
+ results.append(res)
+
+ return len(results)
+# END FLAGS_THREADPOOL_AS_COMPLETED
+
+if __name__ == '__main__':
+ main(download_many)
+
diff --git a/17-futures/countries/requirements.txt b/17-futures/countries/requirements.txt
new file mode 100644
index 0000000..6f29576
--- /dev/null
+++ b/17-futures/countries/requirements.txt
@@ -0,0 +1,3 @@
+aiohttp==0.13.1
+requests==2.21.0
+tqdm==1.0
diff --git a/17-futures/countries/vaurien_delay.sh b/17-futures/countries/vaurien_delay.sh
new file mode 100755
index 0000000..f5a7ffb
--- /dev/null
+++ b/17-futures/countries/vaurien_delay.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+vaurien --protocol http --backend localhost:8001 \
+ --proxy localhost:8002 \
+ --behavior 100:delay --behavior-delay-sleep .5
diff --git a/17-futures/countries/vaurien_error_delay.sh b/17-futures/countries/vaurien_error_delay.sh
new file mode 100755
index 0000000..f13d02e
--- /dev/null
+++ b/17-futures/countries/vaurien_error_delay.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+vaurien --protocol http --backend localhost:8001 \
+ --proxy localhost:8003 \
+ --behavior 25:error,50:delay --behavior-delay-sleep .5
\ No newline at end of file
diff --git a/17-futures/crypto/arcfour-timings.txt b/17-futures/crypto/arcfour-timings.txt
new file mode 100644
index 0000000..a90ecdb
--- /dev/null
+++ b/17-futures/crypto/arcfour-timings.txt
@@ -0,0 +1,65 @@
+workers|time
+4|4.96
+3|5.40
+2|8.35
+1|11.25
+1|11.17
+2|8.45
+3|6.08
+4|5.83
+4|6.22
+3|7.33
+2|9.48
+1|11.86
+1|11.72
+2|9.22
+3|6.74
+4|6.37
+4|4.94
+3|5.51
+2|8.25
+1|11.47
+1|12.90
+2|8.94
+3|6.44
+4|5.90
+4|5.94
+3|6.46
+2|9.10
+1|11.66
+1|11.48
+2|9.08
+3|6.31
+4|5.99
+4|5.02
+3|5.46
+2|8.26
+1|11.18
+1|11.23
+2|8.52
+3|5.64
+4|5.39
+4|5.53
+3|6.07
+2|8.66
+1|11.42
+1|11.34
+2|8.44
+3|5.88
+4|5.57
+4|4.93
+3|5.47
+2|8.65
+1|11.23
+1|11.12
+2|7.83
+3|5.81
+4|5.45
+4|5.54
+3|6.09
+2|8.84
+1|11.45
+1|11.25
+2|8.32
+3|6.02
+4|5.74
diff --git a/17-futures/crypto/arcfour.py b/17-futures/crypto/arcfour.py
new file mode 100644
index 0000000..7037f71
--- /dev/null
+++ b/17-futures/crypto/arcfour.py
@@ -0,0 +1,55 @@
+"""RC4 compatible algorithm"""
+
+def arcfour(key, in_bytes, loops=20):
+
+ kbox = bytearray(256) # create key box
+ for i, car in enumerate(key): # copy key and vector
+ kbox[i] = car
+ j = len(key)
+ for i in range(j, 256): # repeat until full
+ kbox[i] = kbox[i-j]
+
+ # [1] initialize sbox
+ sbox = bytearray(range(256))
+
+ # repeat sbox mixing loop, as recommended in CipherSaber-2
+ # http://ciphersaber.gurus.com/faq.html#cs2
+ j = 0
+ for k in range(loops):
+ for i in range(256):
+ j = (j + sbox[i] + kbox[i]) % 256
+ sbox[i], sbox[j] = sbox[j], sbox[i]
+
+ # main loop
+ i = 0
+ j = 0
+ out_bytes = bytearray()
+
+ for car in in_bytes:
+ i = (i + 1) % 256
+ # [2] shuffle sbox
+ j = (j + sbox[i]) % 256
+ sbox[i], sbox[j] = sbox[j], sbox[i]
+ # [3] compute t
+ t = (sbox[i] + sbox[j]) % 256
+ k = sbox[t]
+ car = car ^ k
+ out_bytes.append(car)
+
+ return out_bytes
+
+
+def test():
+ from time import time
+ clear = bytearray(b'1234567890' * 100000)
+ t0 = time()
+ cipher = arcfour(b'key', clear)
+ print('elapsed time: %.2fs' % (time() - t0))
+ result = arcfour(b'key', cipher)
+ assert result == clear, '%r != %r' % (result, clear)
+ print('elapsed time: %.2fs' % (time() - t0))
+ print('OK')
+
+
+if __name__ == '__main__':
+ test()
diff --git a/17-futures/crypto/arcfour_futures.py b/17-futures/crypto/arcfour_futures.py
new file mode 100644
index 0000000..a1eea81
--- /dev/null
+++ b/17-futures/crypto/arcfour_futures.py
@@ -0,0 +1,46 @@
+import sys
+import time
+from concurrent import futures
+from random import randrange
+from arcfour import arcfour
+
+JOBS = 12
+SIZE = 2**18
+
+KEY = b"'Twas brillig, and the slithy toves\nDid gyre"
+STATUS = '{} workers, elapsed time: {:.2f}s'
+
+
+def arcfour_test(size, key):
+ in_text = bytearray(randrange(256) for i in range(size))
+ cypher_text = arcfour(key, in_text)
+ out_text = arcfour(key, cypher_text)
+ assert in_text == out_text, 'Failed arcfour_test'
+ return size
+
+
+def main(workers=None):
+ if workers:
+ workers = int(workers)
+ t0 = time.time()
+
+ with futures.ProcessPoolExecutor(workers) as executor:
+ actual_workers = executor._max_workers
+ to_do = []
+ for i in range(JOBS, 0, -1):
+ size = SIZE + int(SIZE / JOBS * (i - JOBS/2))
+ job = executor.submit(arcfour_test, size, KEY)
+ to_do.append(job)
+
+ for future in futures.as_completed(to_do):
+ res = future.result()
+ print('{:.1f} KB'.format(res/2**10))
+
+ print(STATUS.format(actual_workers, time.time() - t0))
+
+if __name__ == '__main__':
+ if len(sys.argv) == 2:
+ workers = int(sys.argv[1])
+ else:
+ workers = None
+ main(workers)
diff --git a/17-futures/crypto/arcfour_test.py b/17-futures/crypto/arcfour_test.py
new file mode 100644
index 0000000..1220730
--- /dev/null
+++ b/17-futures/crypto/arcfour_test.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python3
+
+from arcfour import arcfour
+
+'''
+Source of the test vectors:
+ A Stream Cipher Encryption Algorithm "Arcfour"
+ http://tools.ietf.org/html/draft-kaukonen-cipher-arcfour-03
+'''
+
+TEST_VECTORS = [
+ ('CRYPTLIB', {
+ 'Plain Text' : (0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
+ 'Key' : (0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF),
+ 'Cipher Text' : (0x74, 0x94, 0xC2, 0xE7, 0x10, 0x4B, 0x08, 0x79),
+ }
+ ),
+ ('COMMERCE', {
+ 'Plain Text' : (0xdc, 0xee, 0x4c, 0xf9, 0x2c),
+ 'Key' : (0x61, 0x8a, 0x63, 0xd2, 0xfb),
+ 'Cipher Text' : (0xf1, 0x38, 0x29, 0xc9, 0xde),
+ }
+ ),
+ ('SSH ARCFOUR', {
+ 'Plain Text' : (
+ 0x52, 0x75, 0x69, 0x73, 0x6c, 0x69, 0x6e, 0x6e,
+ 0x75, 0x6e, 0x20, 0x6c, 0x61, 0x75, 0x6c, 0x75,
+ 0x20, 0x6b, 0x6f, 0x72, 0x76, 0x69, 0x73, 0x73,
+ 0x73, 0x61, 0x6e, 0x69, 0x2c, 0x20, 0x74, 0xe4,
+ 0x68, 0x6b, 0xe4, 0x70, 0xe4, 0x69, 0x64, 0x65,
+ 0x6e, 0x20, 0x70, 0xe4, 0xe4, 0x6c, 0x6c, 0xe4,
+ 0x20, 0x74, 0xe4, 0x79, 0x73, 0x69, 0x6b, 0x75,
+ 0x75, 0x2e, 0x20, 0x4b, 0x65, 0x73, 0xe4, 0x79,
+ 0xf6, 0x6e, 0x20, 0x6f, 0x6e, 0x20, 0x6f, 0x6e,
+ 0x6e, 0x69, 0x20, 0x6f, 0x6d, 0x61, 0x6e, 0x61,
+ 0x6e, 0x69, 0x2c, 0x20, 0x6b, 0x61, 0x73, 0x6b,
+ 0x69, 0x73, 0x61, 0x76, 0x75, 0x75, 0x6e, 0x20,
+ 0x6c, 0x61, 0x61, 0x6b, 0x73, 0x6f, 0x74, 0x20,
+ 0x76, 0x65, 0x72, 0x68, 0x6f, 0x75, 0x75, 0x2e,
+ 0x20, 0x45, 0x6e, 0x20, 0x6d, 0x61, 0x20, 0x69,
+ 0x6c, 0x6f, 0x69, 0x74, 0x73, 0x65, 0x2c, 0x20,
+ 0x73, 0x75, 0x72, 0x65, 0x20, 0x68, 0x75, 0x6f,
+ 0x6b, 0x61, 0x61, 0x2c, 0x20, 0x6d, 0x75, 0x74,
+ 0x74, 0x61, 0x20, 0x6d, 0x65, 0x74, 0x73, 0xe4,
+ 0x6e, 0x20, 0x74, 0x75, 0x6d, 0x6d, 0x75, 0x75,
+ 0x73, 0x20, 0x6d, 0x75, 0x6c, 0x6c, 0x65, 0x20,
+ 0x74, 0x75, 0x6f, 0x6b, 0x61, 0x61, 0x2e, 0x20,
+ 0x50, 0x75, 0x75, 0x6e, 0x74, 0x6f, 0x20, 0x70,
+ 0x69, 0x6c, 0x76, 0x65, 0x6e, 0x2c, 0x20, 0x6d,
+ 0x69, 0x20, 0x68, 0x75, 0x6b, 0x6b, 0x75, 0x75,
+ 0x2c, 0x20, 0x73, 0x69, 0x69, 0x6e, 0x74, 0x6f,
+ 0x20, 0x76, 0x61, 0x72, 0x61, 0x6e, 0x20, 0x74,
+ 0x75, 0x75, 0x6c, 0x69, 0x73, 0x65, 0x6e, 0x2c,
+ 0x20, 0x6d, 0x69, 0x20, 0x6e, 0x75, 0x6b, 0x6b,
+ 0x75, 0x75, 0x2e, 0x20, 0x54, 0x75, 0x6f, 0x6b,
+ 0x73, 0x75, 0x74, 0x20, 0x76, 0x61, 0x6e, 0x61,
+ 0x6d, 0x6f, 0x6e, 0x20, 0x6a, 0x61, 0x20, 0x76,
+ 0x61, 0x72, 0x6a, 0x6f, 0x74, 0x20, 0x76, 0x65,
+ 0x65, 0x6e, 0x2c, 0x20, 0x6e, 0x69, 0x69, 0x73,
+ 0x74, 0xe4, 0x20, 0x73, 0x79, 0x64, 0xe4, 0x6d,
+ 0x65, 0x6e, 0x69, 0x20, 0x6c, 0x61, 0x75, 0x6c,
+ 0x75, 0x6e, 0x20, 0x74, 0x65, 0x65, 0x6e, 0x2e,
+ 0x20, 0x2d, 0x20, 0x45, 0x69, 0x6e, 0x6f, 0x20,
+ 0x4c, 0x65, 0x69, 0x6e, 0x6f),
+ 'Key' : (
+ 0x29, 0x04, 0x19, 0x72, 0xfb, 0x42, 0xba, 0x5f,
+ 0xc7, 0x12, 0x77, 0x12, 0xf1, 0x38, 0x29, 0xc9),
+ 'Cipher Text' : (
+ 0x35, 0x81, 0x86, 0x99, 0x90, 0x01, 0xe6, 0xb5,
+ 0xda, 0xf0, 0x5e, 0xce, 0xeb, 0x7e, 0xee, 0x21,
+ 0xe0, 0x68, 0x9c, 0x1f, 0x00, 0xee, 0xa8, 0x1f,
+ 0x7d, 0xd2, 0xca, 0xae, 0xe1, 0xd2, 0x76, 0x3e,
+ 0x68, 0xaf, 0x0e, 0xad, 0x33, 0xd6, 0x6c, 0x26,
+ 0x8b, 0xc9, 0x46, 0xc4, 0x84, 0xfb, 0xe9, 0x4c,
+ 0x5f, 0x5e, 0x0b, 0x86, 0xa5, 0x92, 0x79, 0xe4,
+ 0xf8, 0x24, 0xe7, 0xa6, 0x40, 0xbd, 0x22, 0x32,
+ 0x10, 0xb0, 0xa6, 0x11, 0x60, 0xb7, 0xbc, 0xe9,
+ 0x86, 0xea, 0x65, 0x68, 0x80, 0x03, 0x59, 0x6b,
+ 0x63, 0x0a, 0x6b, 0x90, 0xf8, 0xe0, 0xca, 0xf6,
+ 0x91, 0x2a, 0x98, 0xeb, 0x87, 0x21, 0x76, 0xe8,
+ 0x3c, 0x20, 0x2c, 0xaa, 0x64, 0x16, 0x6d, 0x2c,
+ 0xce, 0x57, 0xff, 0x1b, 0xca, 0x57, 0xb2, 0x13,
+ 0xf0, 0xed, 0x1a, 0xa7, 0x2f, 0xb8, 0xea, 0x52,
+ 0xb0, 0xbe, 0x01, 0xcd, 0x1e, 0x41, 0x28, 0x67,
+ 0x72, 0x0b, 0x32, 0x6e, 0xb3, 0x89, 0xd0, 0x11,
+ 0xbd, 0x70, 0xd8, 0xaf, 0x03, 0x5f, 0xb0, 0xd8,
+ 0x58, 0x9d, 0xbc, 0xe3, 0xc6, 0x66, 0xf5, 0xea,
+ 0x8d, 0x4c, 0x79, 0x54, 0xc5, 0x0c, 0x3f, 0x34,
+ 0x0b, 0x04, 0x67, 0xf8, 0x1b, 0x42, 0x59, 0x61,
+ 0xc1, 0x18, 0x43, 0x07, 0x4d, 0xf6, 0x20, 0xf2,
+ 0x08, 0x40, 0x4b, 0x39, 0x4c, 0xf9, 0xd3, 0x7f,
+ 0xf5, 0x4b, 0x5f, 0x1a, 0xd8, 0xf6, 0xea, 0x7d,
+ 0xa3, 0xc5, 0x61, 0xdf, 0xa7, 0x28, 0x1f, 0x96,
+ 0x44, 0x63, 0xd2, 0xcc, 0x35, 0xa4, 0xd1, 0xb0,
+ 0x34, 0x90, 0xde, 0xc5, 0x1b, 0x07, 0x11, 0xfb,
+ 0xd6, 0xf5, 0x5f, 0x79, 0x23, 0x4d, 0x5b, 0x7c,
+ 0x76, 0x66, 0x22, 0xa6, 0x6d, 0xe9, 0x2b, 0xe9,
+ 0x96, 0x46, 0x1d, 0x5e, 0x4d, 0xc8, 0x78, 0xef,
+ 0x9b, 0xca, 0x03, 0x05, 0x21, 0xe8, 0x35, 0x1e,
+ 0x4b, 0xae, 0xd2, 0xfd, 0x04, 0xf9, 0x46, 0x73,
+ 0x68, 0xc4, 0xad, 0x6a, 0xc1, 0x86, 0xd0, 0x82,
+ 0x45, 0xb2, 0x63, 0xa2, 0x66, 0x6d, 0x1f, 0x6c,
+ 0x54, 0x20, 0xf1, 0x59, 0x9d, 0xfd, 0x9f, 0x43,
+ 0x89, 0x21, 0xc2, 0xf5, 0xa4, 0x63, 0x93, 0x8c,
+ 0xe0, 0x98, 0x22, 0x65, 0xee, 0xf7, 0x01, 0x79,
+ 0xbc, 0x55, 0x3f, 0x33, 0x9e, 0xb1, 0xa4, 0xc1,
+ 0xaf, 0x5f, 0x6a, 0x54, 0x7f),
+ }
+ ),
+]
+
+for name, vectors in TEST_VECTORS:
+ print(name, end='')
+ plain = bytearray(vectors['Plain Text'])
+ cipher = bytearray(vectors['Cipher Text'])
+ key = bytearray(vectors['Key'])
+ assert cipher == arcfour(key, plain, loops=1)
+ assert plain == arcfour(key, cipher, loops=1)
+ print(' --> OK')
diff --git a/17-futures/crypto/sha-timings.txt b/17-futures/crypto/sha-timings.txt
new file mode 100644
index 0000000..a06a798
--- /dev/null
+++ b/17-futures/crypto/sha-timings.txt
@@ -0,0 +1,50 @@
+workers|time
+4|8.88
+3|11.14
+2|13.66
+1|22.80
+1|25.42
+2|16.37
+3|12.09
+4|11.06
+4|11.40
+3|11.51
+2|15.20
+1|24.18
+1|22.09
+2|12.48
+3|10.78
+4|10.48
+4|8.48
+3|10.07
+2|12.42
+1|20.24
+1|20.31
+2|11.39
+3|10.88
+4|10.44
+4|10.43
+3|11.11
+2|12.39
+1|20.69
+1|20.53
+2|11.80
+3|11.01
+4|10.52
+4|11.50
+3|14.45
+2|16.95
+1|24.77
+1|22.71
+2|18.35
+3|12.66
+4|12.20
+4|12.37
+3|13.37
+2|19.30
+1|24.30
+1|23.93
+2|18.51
+3|13.88
+4|12.97
+
diff --git a/17-futures/crypto/sha_futures.py b/17-futures/crypto/sha_futures.py
new file mode 100644
index 0000000..594c6aa
--- /dev/null
+++ b/17-futures/crypto/sha_futures.py
@@ -0,0 +1,38 @@
+import sys
+import time
+import hashlib
+from concurrent import futures
+from random import randrange
+
+JOBS = 12
+SIZE = 2**20
+STATUS = '{} workers, elapsed time: {:.2f}s'
+
+
+def sha(size):
+ data = bytearray(randrange(256) for i in range(size))
+ algo = hashlib.new('sha256')
+ algo.update(data)
+ return algo.hexdigest()
+
+
+def main(workers=None):
+ if workers:
+ workers = int(workers)
+ t0 = time.time()
+
+ with futures.ProcessPoolExecutor(workers) as executor:
+ actual_workers = executor._max_workers
+ to_do = (executor.submit(sha, SIZE) for i in range(JOBS))
+ for future in futures.as_completed(to_do):
+ res = future.result()
+ print(res)
+
+ print(STATUS.format(actual_workers, time.time() - t0))
+
+if __name__ == '__main__':
+ if len(sys.argv) == 2:
+ workers = int(sys.argv[1])
+ else:
+ workers = None
+ main(workers)
diff --git a/17-futures/demo_executor_map.py b/17-futures/demo_executor_map.py
new file mode 100644
index 0000000..f3625cf
--- /dev/null
+++ b/17-futures/demo_executor_map.py
@@ -0,0 +1,34 @@
+"""
+Experiment with ``ThreadPoolExecutor.map``
+"""
+# BEGIN EXECUTOR_MAP
+from time import sleep, strftime
+from concurrent import futures
+
+
+def display(*args): # <1>
+ print(strftime('[%H:%M:%S]'), end=' ')
+ print(*args)
+
+
+def loiter(n): # <2>
+ msg = '{}loiter({}): doing nothing for {}s...'
+ display(msg.format('\t'*n, n, n))
+ sleep(n)
+ msg = '{}loiter({}): done.'
+ display(msg.format('\t'*n, n))
+ return n * 10 # <3>
+
+
+def main():
+ display('Script starting.')
+ executor = futures.ThreadPoolExecutor(max_workers=3) # <4>
+ results = executor.map(loiter, range(5)) # <5>
+ display('results:', results) # <6>
+ display('Waiting for individual results:')
+ for i, result in enumerate(results): # <7>
+ display('result {}: {}'.format(i, result))
+
+
+main()
+# END EXECUTOR_MAP
diff --git a/18-asyncio-py3.7/README.rst b/18-asyncio-py3.7/README.rst
new file mode 100644
index 0000000..aad52a3
--- /dev/null
+++ b/18-asyncio-py3.7/README.rst
@@ -0,0 +1,11 @@
+Refactored sample code for Chapter 18 - "Concurrency with asyncio"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
+
+##################################################################
+NOTE: this directory contains the examples of Chapter 18
+rewritten using the async/await syntax available since Python 3.5,
+instead of the "yield from" syntax of Python 3.3 and 3.4.
+The code was tested with Python 3.7.
+##################################################################
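+
+For illustration only (this snippet is not part of the book's examples),
+here is the same coroutine in both styles, taken from the two versions of
+``spinner_asyncio.py``::
+
+    # Python 3.3/3.4 style, as in ../18-asyncio/spinner_asyncio.py
+    @asyncio.coroutine
+    def slow_function():
+        yield from asyncio.sleep(3)
+        return 42
+
+    # Python 3.5+ style, as in ./spinner_asyncio.py
+    async def slow_function():
+        await asyncio.sleep(3)
+        return 42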
diff --git a/18-asyncio-py3.7/charfinder/.gitignore b/18-asyncio-py3.7/charfinder/.gitignore
new file mode 100644
index 0000000..f93ecca
--- /dev/null
+++ b/18-asyncio-py3.7/charfinder/.gitignore
@@ -0,0 +1 @@
+charfinder_index.pickle
diff --git a/18-asyncio-py3.7/charfinder/charfinder.py b/18-asyncio-py3.7/charfinder/charfinder.py
new file mode 100755
index 0000000..64e4949
--- /dev/null
+++ b/18-asyncio-py3.7/charfinder/charfinder.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python3
+
+"""
+Unicode character finder utility:
+find characters based on words in their official names.
+
+This can be used from the command line, just pass words as arguments.
+
+Here is the ``main`` function which makes it happen::
+
+ >>> main('rook') # doctest: +NORMALIZE_WHITESPACE
+ U+2656 ♖ WHITE CHESS ROOK
+ U+265C ♜ BLACK CHESS ROOK
+ (2 matches for 'rook')
+ >>> main('rook', 'black') # doctest: +NORMALIZE_WHITESPACE
+ U+265C ♜ BLACK CHESS ROOK
+ (1 match for 'rook black')
+ >>> main('white bishop') # doctest: +NORMALIZE_WHITESPACE
+ U+2657 ♗ WHITE CHESS BISHOP
+ (1 match for 'white bishop')
+ >>> main("jabberwocky's vest")
+ (No match for "jabberwocky's vest")
+
+
+For exploring words that occur in the character names, there is the
+``word_report`` function::
+
+ >>> index = UnicodeNameIndex(sample_chars)
+ >>> index.word_report()
+ 3 SIGN
+ 2 A
+ 2 EURO
+ 2 LATIN
+ 2 LETTER
+ 1 CAPITAL
+ 1 CURRENCY
+ 1 DOLLAR
+ 1 SMALL
+ >>> index = UnicodeNameIndex()
+ >>> index.word_report(10)
+ 75821 CJK
+ 75761 IDEOGRAPH
+ 74656 UNIFIED
+ 13196 SYLLABLE
+ 11735 HANGUL
+ 7616 LETTER
+ 2232 WITH
+ 2180 SIGN
+ 2122 SMALL
+ 1709 CAPITAL
+
+Note: characters with names starting with 'CJK UNIFIED IDEOGRAPH'
+are indexed with those three words only, excluding the hexadecimal
+codepoint at the end of the name.
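+
+For example, U+4E00 is named 'CJK UNIFIED IDEOGRAPH-4E00', so it is
+indexed under the words CJK, UNIFIED and IDEOGRAPH only.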
+
+"""
+
+import sys
+import re
+import unicodedata
+import pickle
+import warnings
+import itertools
+import functools
+from collections import namedtuple
+
+RE_WORD = re.compile(r'\w+')
+RE_UNICODE_NAME = re.compile('^[A-Z0-9 -]+$')
+RE_CODEPOINT = re.compile(r'U\+([0-9A-F]{4,6})')
+
+INDEX_NAME = 'charfinder_index.pickle'
+MINIMUM_SAVE_LEN = 10000
+CJK_UNI_PREFIX = 'CJK UNIFIED IDEOGRAPH'
+CJK_CMP_PREFIX = 'CJK COMPATIBILITY IDEOGRAPH'
+
+sample_chars = [
+ '$', # DOLLAR SIGN
+ 'A', # LATIN CAPITAL LETTER A
+ 'a', # LATIN SMALL LETTER A
+ '\u20a0', # EURO-CURRENCY SIGN
+ '\u20ac', # EURO SIGN
+]
+
+CharDescription = namedtuple('CharDescription', 'code_str char name')
+
+QueryResult = namedtuple('QueryResult', 'count items')
+
+
+def tokenize(text):
+ """return iterable of uppercased words"""
+ for match in RE_WORD.finditer(text):
+ yield match.group().upper()
+
+
+def query_type(text):
+ text_upper = text.upper()
+ if 'U+' in text_upper:
+ return 'CODEPOINT'
+ elif RE_UNICODE_NAME.match(text_upper):
+ return 'NAME'
+ else:
+ return 'CHARACTERS'
+
+
+class UnicodeNameIndex:
+
+ def __init__(self, chars=None):
+ self.load(chars)
+
+ def load(self, chars=None):
+ self.index = None
+ if chars is None:
+ try:
+ with open(INDEX_NAME, 'rb') as fp:
+ self.index = pickle.load(fp)
+ except OSError:
+ pass
+ if self.index is None:
+ self.build_index(chars)
+ if len(self.index) > MINIMUM_SAVE_LEN:
+ try:
+ self.save()
+ except OSError as exc:
+ warnings.warn('Could not save {!r}: {}'
+ .format(INDEX_NAME, exc))
+
+ def save(self):
+ with open(INDEX_NAME, 'wb') as fp:
+ pickle.dump(self.index, fp)
+
+ def build_index(self, chars=None):
+ if chars is None:
+ chars = (chr(i) for i in range(32, sys.maxunicode))
+ index = {}
+ for char in chars:
+ try:
+ name = unicodedata.name(char)
+ except ValueError:
+ continue
+ if name.startswith(CJK_UNI_PREFIX):
+ name = CJK_UNI_PREFIX
+ elif name.startswith(CJK_CMP_PREFIX):
+ name = CJK_CMP_PREFIX
+
+ for word in tokenize(name):
+ index.setdefault(word, set()).add(char)
+
+ self.index = index
+
+ def word_rank(self, top=None):
+ res = [(len(self.index[key]), key) for key in self.index]
+ res.sort(key=lambda item: (-item[0], item[1]))
+ if top is not None:
+ res = res[:top]
+ return res
+
+ def word_report(self, top=None):
+ for postings, key in self.word_rank(top):
+ print('{:5} {}'.format(postings, key))
+
+ def find_chars(self, query, start=0, stop=None):
+ stop = sys.maxsize if stop is None else stop
+ result_sets = []
+ for word in tokenize(query):
+ chars = self.index.get(word)
+ if chars is None: # shortcut: no such word
+ result_sets = []
+ break
+ result_sets.append(chars)
+
+ if not result_sets:
+ return QueryResult(0, ())
+
+ result = functools.reduce(set.intersection, result_sets)
+ result = sorted(result) # must sort to support start, stop
+ result_iter = itertools.islice(result, start, stop)
+ return QueryResult(len(result),
+ (char for char in result_iter))
+
+ def describe(self, char):
+ code_str = 'U+{:04X}'.format(ord(char))
+ name = unicodedata.name(char)
+ return CharDescription(code_str, char, name)
+
+ def find_descriptions(self, query, start=0, stop=None):
+ for char in self.find_chars(query, start, stop).items:
+ yield self.describe(char)
+
+ def get_descriptions(self, chars):
+ for char in chars:
+ yield self.describe(char)
+
+ def describe_str(self, char):
+ return '{:7}\t{}\t{}'.format(*self.describe(char))
+
+ def find_description_strs(self, query, start=0, stop=None):
+ for char in self.find_chars(query, start, stop).items:
+ yield self.describe_str(char)
+
+ @staticmethod # not an instance method due to concurrency
+ def status(query, counter):
+ if counter == 0:
+ msg = 'No match'
+ elif counter == 1:
+ msg = '1 match'
+ else:
+ msg = '{} matches'.format(counter)
+ return '{} for {!r}'.format(msg, query)
+
+
+def main(*args):
+ index = UnicodeNameIndex()
+ query = ' '.join(args)
+ n = 0
+ for n, line in enumerate(index.find_description_strs(query), 1):
+ print(line)
+ print('({})'.format(index.status(query, n)))
+
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ main(*sys.argv[1:])
+ else:
+ print('Usage: {} word1 [word2]...'.format(sys.argv[0]))
diff --git a/18-asyncio-py3.7/charfinder/tcp_charfinder.py b/18-asyncio-py3.7/charfinder/tcp_charfinder.py
new file mode 100755
index 0000000..4980b92
--- /dev/null
+++ b/18-asyncio-py3.7/charfinder/tcp_charfinder.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+# BEGIN TCP_CHARFINDER_TOP
+import sys
+import asyncio
+
+from charfinder import UnicodeNameIndex # <1>
+
+CRLF = b'\r\n'
+PROMPT = b'?> '
+
+index = UnicodeNameIndex() # <2>
+
+async def handle_queries(reader, writer): # <3>
+ while True: # <4>
+ writer.write(PROMPT) # can't await! # <5>
+ await writer.drain() # must await! # <6>
+ data = await reader.readline() # <7>
+ try:
+ query = data.decode().strip()
+ except UnicodeDecodeError: # <8>
+ query = '\x00'
+ client = writer.get_extra_info('peername') # <9>
+ print('Received from {}: {!r}'.format(client, query)) # <10>
+ if query:
+ if ord(query[:1]) < 32: # <11>
+ break
+ lines = list(index.find_description_strs(query)) # <12>
+ if lines:
+ writer.writelines(line.encode() + CRLF for line in lines) # <13>
+ writer.write(index.status(query, len(lines)).encode() + CRLF) # <14>
+
+ await writer.drain() # <15>
+ print('Sent {} results'.format(len(lines))) # <16>
+
+ print('Close the client socket') # <17>
+ writer.close() # <18>
+# END TCP_CHARFINDER_TOP
+
+# BEGIN TCP_CHARFINDER_MAIN
+async def main(address='127.0.0.1', port=2323): # <1>
+ port = int(port)
+ server = await asyncio.start_server(handle_queries, address, port) # <2>
+
+ host = server.sockets[0].getsockname() # <3>
+ print('Serving on {}. Hit CTRL-C to stop.'.format(host)) # <4>
+
+ async with server:
+ await server.serve_forever()
+
+
+if __name__ == '__main__':
+ asyncio.run(main(*sys.argv[1:])) # <5>
+# END TCP_CHARFINDER_MAIN
diff --git a/18-asyncio-py3.7/charfinder/test_charfinder.py b/18-asyncio-py3.7/charfinder/test_charfinder.py
new file mode 100644
index 0000000..a6fe729
--- /dev/null
+++ b/18-asyncio-py3.7/charfinder/test_charfinder.py
@@ -0,0 +1,115 @@
+import pytest
+
+from charfinder import UnicodeNameIndex, tokenize, sample_chars, query_type
+from unicodedata import name
+
+
+@pytest.fixture
+def sample_index():
+ return UnicodeNameIndex(sample_chars)
+
+
+@pytest.fixture(scope="module")
+def full_index():
+ return UnicodeNameIndex()
+
+
+def test_query_type():
+ assert query_type('blue') == 'NAME'
+
+
+def test_tokenize():
+ assert list(tokenize('')) == []
+ assert list(tokenize('a b')) == ['A', 'B']
+ assert list(tokenize('a-b')) == ['A', 'B']
+ assert list(tokenize('abc')) == ['ABC']
+ assert list(tokenize('café')) == ['CAFÉ']
+
+
+def test_index():
+ sample_index = UnicodeNameIndex(sample_chars)
+ assert len(sample_index.index) == 9
+
+
+def test_find_word_no_match(sample_index):
+ res = sample_index.find_chars('qwertyuiop')
+ assert len(res.items) == 0
+
+
+def test_find_word_1_match(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('currency').items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN')]
+
+
+def test_find_word_1_match_character_result(sample_index):
+ res = [name(char) for char in
+ sample_index.find_chars('currency').items]
+ assert res == ['EURO-CURRENCY SIGN']
+
+
+def test_find_word_2_matches(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('Euro').items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN'),
+ (8364, 'EURO SIGN')]
+
+
+def test_find_2_words_no_matches(sample_index):
+ res = sample_index.find_chars('Euro letter')
+ assert res.count == 0
+
+
+def test_find_2_words_no_matches_because_one_not_found(sample_index):
+ res = sample_index.find_chars('letter qwertyuiop')
+ assert res.count == 0
+
+
+def test_find_2_words_1_match(sample_index):
+ res = sample_index.find_chars('sign dollar')
+ assert res.count == 1
+
+
+def test_find_2_words_2_matches(sample_index):
+ res = sample_index.find_chars('latin letter')
+ assert res.count == 2
+
+
+def test_find_chars_many_matches_full(full_index):
+ res = full_index.find_chars('letter')
+ assert res.count > 7000
+
+
+def test_find_1_word_1_match_full(full_index):
+ res = [(ord(char), name(char))
+ for char in full_index.find_chars('registered').items]
+ assert res == [(174, 'REGISTERED SIGN')]
+
+
+def test_find_1_word_2_matches_full(full_index):
+ res = full_index.find_chars('rook')
+ assert res.count == 2
+
+
+def test_find_3_words_no_matches_full(full_index):
+ res = full_index.find_chars('no such character')
+ assert res.count == 0
+
+
+def test_find_with_start(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 1).items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN'), (8364, 'EURO SIGN')]
+
+
+def test_find_with_stop(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 0, 2).items]
+ assert res == [(36, 'DOLLAR SIGN'), (8352, 'EURO-CURRENCY SIGN')]
+
+
+def test_find_with_start_stop(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 1, 2).items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN')]
+
diff --git a/18-asyncio-py3.7/countdown.py b/18-asyncio-py3.7/countdown.py
new file mode 100644
index 0000000..5f09000
--- /dev/null
+++ b/18-asyncio-py3.7/countdown.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+
+# Inspired by
+# https://snarky.ca/how-the-heck-does-async-await-work-in-python-3-5/
+
+import asyncio
+import time
+
+
+async def countdown(label, delay):
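+ # one tab stop per letter after 'A', so each task prints in its own column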
+ tabs = (ord(label) - ord('A')) * '\t'
+ n = 3
+ while n > 0:
+ await asyncio.sleep(delay) # <----
+ dt = time.perf_counter() - t0
+ print('━' * 50)
+ print(f'{dt:7.4f}s \t{tabs}{label} = {n}')
+ n -= 1
+
+loop = asyncio.get_event_loop()
+tasks = [
+ loop.create_task(countdown('A', .7)),
+ loop.create_task(countdown('B', 2)),
+ loop.create_task(countdown('C', .3)),
+ loop.create_task(countdown('D', 1)),
+]
+t0 = time.perf_counter()
+loop.run_until_complete(asyncio.wait(tasks))
+loop.close()
+print('━' * 50)
diff --git a/18-asyncio-py3.7/countries/README.rst b/18-asyncio-py3.7/countries/README.rst
new file mode 100644
index 0000000..4945806
--- /dev/null
+++ b/18-asyncio-py3.7/countries/README.rst
@@ -0,0 +1,3 @@
+The ``asyncio`` flag download examples are in the
+``../../17-futures/countries/`` directory together
+with the sequential and threadpool examples.
diff --git a/18-asyncio-py3.7/spinner_asyncio.py b/18-asyncio-py3.7/spinner_asyncio.py
new file mode 100755
index 0000000..369a8f0
--- /dev/null
+++ b/18-asyncio-py3.7/spinner_asyncio.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+
+# spinner_asyncio.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
+# BEGIN SPINNER_ASYNCIO
+import asyncio
+import itertools
+
+
+async def spin(msg): # <1>
+ for char in itertools.cycle('|/-\\'):
+ status = char + ' ' + msg
+ print(status, flush=True, end='\r')
+ try:
+ await asyncio.sleep(.1) # <2>
+ except asyncio.CancelledError: # <3>
+ break
+ print(' ' * len(status), end='\r')
+
+
+async def slow_function(): # <4>
+ # pretend waiting a long time for I/O
+ await asyncio.sleep(3) # <5>
+ return 42
+
+
+async def supervisor(): # <6>
+ spinner = asyncio.create_task(spin('thinking!')) # <7>
+ print('spinner object:', spinner) # <8>
+ result = await slow_function() # <9>
+ spinner.cancel() # <10>
+ return result
+
+
+def main():
+ result = asyncio.run(supervisor()) # <11>
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
+# END SPINNER_ASYNCIO
diff --git a/18-asyncio-py3.7/spinner_thread.py b/18-asyncio-py3.7/spinner_thread.py
new file mode 100755
index 0000000..bffc921
--- /dev/null
+++ b/18-asyncio-py3.7/spinner_thread.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+# spinner_thread.py
+
+# credits: Adapted from Michele Simionato's
+# multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
+# BEGIN SPINNER_THREAD
+import threading
+import itertools
+import time
+
+
+def spin(msg, done): # <1>
+ for char in itertools.cycle('|/-\\'): # <3>
+ status = char + ' ' + msg
+ print(status, flush=True, end='\r')
+ if done.wait(.1): # <5>
+ break
+ print(' ' * len(status), end='\r')
+
+def slow_function(): # <7>
+ # pretend waiting a long time for I/O
+ time.sleep(3) # <8>
+ return 42
+
+
+def supervisor(): # <9>
+ done = threading.Event()
+ spinner = threading.Thread(target=spin,
+ args=('thinking!', done))
+ print('spinner object:', spinner) # <10>
+ spinner.start() # <11>
+ result = slow_function() # <12>
+ done.set() # <13>
+ spinner.join() # <14>
+ return result
+
+
+def main():
+ result = supervisor() # <15>
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
+# END SPINNER_THREAD
diff --git a/18-asyncio/README.rst b/18-asyncio/README.rst
new file mode 100644
index 0000000..f04d6bd
--- /dev/null
+++ b/18-asyncio/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 18 - "Concurrency with asyncio"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/18-asyncio/charfinder/charfinder.py b/18-asyncio/charfinder/charfinder.py
new file mode 100755
index 0000000..c061f90
--- /dev/null
+++ b/18-asyncio/charfinder/charfinder.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python3
+
+"""
+Unicode character finder utility:
+find characters based on words in their official names.
+
+This can be used from the command line, just pass words as arguments.
+
+Here is the ``main`` function which makes it happen::
+
+ >>> main('rook') # doctest: +NORMALIZE_WHITESPACE
+ U+2656 ♖ WHITE CHESS ROOK
+ U+265C ♜ BLACK CHESS ROOK
+ (2 matches for 'rook')
+ >>> main('rook', 'black') # doctest: +NORMALIZE_WHITESPACE
+ U+265C ♜ BLACK CHESS ROOK
+ (1 match for 'rook black')
+ >>> main('white bishop') # doctest: +NORMALIZE_WHITESPACE
+ U+2657 ♗ WHITE CHESS BISHOP
+ (1 match for 'white bishop')
+ >>> main("jabberwocky's vest")
+ (No match for "jabberwocky's vest")
+
+
+For exploring words that occur in the character names, there is the
+``word_report`` function::
+
+ >>> index = UnicodeNameIndex(sample_chars)
+ >>> index.word_report()
+ 3 SIGN
+ 2 A
+ 2 EURO
+ 2 LATIN
+ 2 LETTER
+ 1 CAPITAL
+ 1 CURRENCY
+ 1 DOLLAR
+ 1 SMALL
+ >>> index = UnicodeNameIndex()
+ >>> index.word_report(10)
+ 75821 CJK
+ 75761 IDEOGRAPH
+ 74656 UNIFIED
+ 13196 SYLLABLE
+ 11735 HANGUL
+ 7616 LETTER
+ 2232 WITH
+ 2180 SIGN
+ 2122 SMALL
+ 1709 CAPITAL
+
+Note: characters with names starting with 'CJK UNIFIED IDEOGRAPH'
+are indexed with those three words only, excluding the hexadecimal
+codepoint at the end of the name.
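+
+For example, U+4E00 is named 'CJK UNIFIED IDEOGRAPH-4E00', so it is
+indexed under the words CJK, UNIFIED and IDEOGRAPH only.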
+
+"""
+
+import sys
+import re
+import unicodedata
+import pickle
+import warnings
+import itertools
+import functools
+from collections import namedtuple
+
+RE_WORD = re.compile(r'\w+')
+RE_UNICODE_NAME = re.compile('^[A-Z0-9 -]+$')
+RE_CODEPOINT = re.compile(r'U\+([0-9A-F]{4,6})')
+
+INDEX_NAME = 'charfinder_index.pickle'
+MINIMUM_SAVE_LEN = 10000
+CJK_UNI_PREFIX = 'CJK UNIFIED IDEOGRAPH'
+CJK_CMP_PREFIX = 'CJK COMPATIBILITY IDEOGRAPH'
+
+sample_chars = [
+ '$', # DOLLAR SIGN
+ 'A', # LATIN CAPITAL LETTER A
+ 'a', # LATIN SMALL LETTER A
+ '\u20a0', # EURO-CURRENCY SIGN
+ '\u20ac', # EURO SIGN
+]
+
+CharDescription = namedtuple('CharDescription', 'code_str char name')
+
+QueryResult = namedtuple('QueryResult', 'count items')
+
+
+def tokenize(text):
+ """return iterable of uppercased words"""
+ for match in RE_WORD.finditer(text):
+ yield match.group().upper()
+
+
+def query_type(text):
+ text_upper = text.upper()
+ if 'U+' in text_upper:
+ return 'CODEPOINT'
+ elif RE_UNICODE_NAME.match(text_upper):
+ return 'NAME'
+ else:
+ return 'CHARACTERS'
+
+
+class UnicodeNameIndex:
+
+ def __init__(self, chars=None):
+ self.load(chars)
+
+ def load(self, chars=None):
+ self.index = None
+ if chars is None:
+ try:
+ with open(INDEX_NAME, 'rb') as fp:
+ self.index = pickle.load(fp)
+ except OSError:
+ pass
+ if self.index is None:
+ self.build_index(chars)
+ if len(self.index) > MINIMUM_SAVE_LEN:
+ try:
+ self.save()
+ except OSError as exc:
+ warnings.warn('Could not save {!r}: {}'
+ .format(INDEX_NAME, exc))
+
+ def save(self):
+ with open(INDEX_NAME, 'wb') as fp:
+ pickle.dump(self.index, fp)
+
+ def build_index(self, chars=None):
+ if chars is None:
+ chars = (chr(i) for i in range(32, sys.maxunicode))
+ index = {}
+ for char in chars:
+ try:
+ name = unicodedata.name(char)
+ except ValueError:
+ continue
+ if name.startswith(CJK_UNI_PREFIX):
+ name = CJK_UNI_PREFIX
+ elif name.startswith(CJK_CMP_PREFIX):
+ name = CJK_CMP_PREFIX
+
+ for word in tokenize(name):
+ index.setdefault(word, set()).add(char)
+
+ self.index = index
+
+ def word_rank(self, top=None):
+ res = [(len(self.index[key]), key) for key in self.index]
+ res.sort(key=lambda item: (-item[0], item[1]))
+ if top is not None:
+ res = res[:top]
+ return res
+
+ def word_report(self, top=None):
+ for postings, key in self.word_rank(top):
+ print('{:5} {}'.format(postings, key))
+
+ def find_chars(self, query, start=0, stop=None):
+ stop = sys.maxsize if stop is None else stop
+ result_sets = []
+ for word in tokenize(query):
+ chars = self.index.get(word)
+ if chars is None: # shortcut: no such word
+ result_sets = []
+ break
+ result_sets.append(chars)
+
+ if not result_sets:
+ return QueryResult(0, ())
+
+ result = functools.reduce(set.intersection, result_sets)
+ result = sorted(result) # must sort to support start, stop
+ result_iter = itertools.islice(result, start, stop)
+ return QueryResult(len(result),
+ (char for char in result_iter))
+
+ def describe(self, char):
+ code_str = 'U+{:04X}'.format(ord(char))
+ name = unicodedata.name(char)
+ return CharDescription(code_str, char, name)
+
+ def find_descriptions(self, query, start=0, stop=None):
+ for char in self.find_chars(query, start, stop).items:
+ yield self.describe(char)
+
+ def get_descriptions(self, chars):
+ for char in chars:
+ yield self.describe(char)
+
+ def describe_str(self, char):
+ return '{:7}\t{}\t{}'.format(*self.describe(char))
+
+ def find_description_strs(self, query, start=0, stop=None):
+ for char in self.find_chars(query, start, stop).items:
+ yield self.describe_str(char)
+
+ @staticmethod # not an instance method due to concurrency
+ def status(query, counter):
+ if counter == 0:
+ msg = 'No match'
+ elif counter == 1:
+ msg = '1 match'
+ else:
+ msg = '{} matches'.format(counter)
+ return '{} for {!r}'.format(msg, query)
+
+
+def main(*args):
+ index = UnicodeNameIndex()
+ query = ' '.join(args)
+ n = 0
+ for n, line in enumerate(index.find_description_strs(query), 1):
+ print(line)
+ print('({})'.format(index.status(query, n)))
+
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ main(*sys.argv[1:])
+ else:
+ print('Usage: {} word1 [word2]...'.format(sys.argv[0]))
diff --git a/18-asyncio/charfinder/http_charfinder.html b/18-asyncio/charfinder/http_charfinder.html
new file mode 100644
index 0000000..43b9cd7
--- /dev/null
+++ b/18-asyncio/charfinder/http_charfinder.html
@@ -0,0 +1,19 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8">
+  <title>Charfinder</title>
+</head>
+<body>
+  Examples: {links}
+  <p>
+  <form action="/">
+    <input type="search" name="query" value="{query}">
+    <input type="submit" value="find"> {message}
+  </form>
+  </p>
+  <table>
+    {result}
+  </table>
+</body>
+</html>
diff --git a/18-asyncio/charfinder/http_charfinder.py b/18-asyncio/charfinder/http_charfinder.py
new file mode 100755
index 0000000..d7e8ca5
--- /dev/null
+++ b/18-asyncio/charfinder/http_charfinder.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+
+import sys
+import asyncio
+from aiohttp import web
+
+from charfinder import UnicodeNameIndex
+
+TEMPLATE_NAME = 'http_charfinder.html'
+CONTENT_TYPE = 'text/html; charset=UTF-8'
+SAMPLE_WORDS = ('bismillah chess cat circled Malayalam digit'
+ ' Roman face Ethiopic black mark symbol dot'
+ ' operator Braille hexagram').split()
+
+ROW_TPL = '<tr><td>{code_str}</td><td>{char}</td><td>{name}</td></tr>'
+LINK_TPL = '<a href="/?query={0}">{0}</a>'
+LINKS_HTML = ', '.join(LINK_TPL.format(word) for word in
+ sorted(SAMPLE_WORDS, key=str.upper))
+
+
+index = UnicodeNameIndex()
+with open(TEMPLATE_NAME) as tpl:
+ template = tpl.read()
+template = template.replace('{links}', LINKS_HTML)
+
+# BEGIN HTTP_CHARFINDER_HOME
+def home(request): # <1>
+ query = request.GET.get('query', '').strip() # <2>
+ print('Query: {!r}'.format(query)) # <3>
+ if query: # <4>
+ descriptions = list(index.find_descriptions(query))
+ res = '\n'.join(ROW_TPL.format(**descr._asdict())
+ for descr in descriptions)
+ msg = index.status(query, len(descriptions))
+ else:
+ descriptions = []
+ res = ''
+ msg = 'Enter words describing characters.'
+
+ html = template.format(query=query, result=res, # <5>
+ message=msg)
+ print('Sending {} results'.format(len(descriptions))) # <6>
+ return web.Response(content_type=CONTENT_TYPE, text=html) # <7>
+# END HTTP_CHARFINDER_HOME
+
+
+# BEGIN HTTP_CHARFINDER_SETUP
+@asyncio.coroutine
+def init(loop, address, port): # <1>
+ app = web.Application(loop=loop) # <2>
+ app.router.add_route('GET', '/', home) # <3>
+ handler = app.make_handler() # <4>
+ server = yield from loop.create_server(handler,
+ address, port) # <5>
+ return server.sockets[0].getsockname() # <6>
+
+def main(address="127.0.0.1", port=8888):
+ port = int(port)
+ loop = asyncio.get_event_loop()
+ host = loop.run_until_complete(init(loop, address, port)) # <7>
+ print('Serving on {}. Hit CTRL-C to stop.'.format(host))
+ try:
+ loop.run_forever() # <8>
+ except KeyboardInterrupt: # CTRL+C pressed
+ pass
+ print('Server shutting down.')
+ loop.close() # <9>
+
+
+if __name__ == '__main__':
+ main(*sys.argv[1:])
+# END HTTP_CHARFINDER_SETUP
diff --git a/18-asyncio/charfinder/requirements.txt b/18-asyncio/charfinder/requirements.txt
new file mode 100644
index 0000000..2d16731
--- /dev/null
+++ b/18-asyncio/charfinder/requirements.txt
@@ -0,0 +1 @@
+aiohttp==0.13.1
diff --git a/18-asyncio/charfinder/tcp_charfinder.py b/18-asyncio/charfinder/tcp_charfinder.py
new file mode 100755
index 0000000..dad76a7
--- /dev/null
+++ b/18-asyncio/charfinder/tcp_charfinder.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+
+# BEGIN TCP_CHARFINDER_TOP
+import sys
+import asyncio
+
+from charfinder import UnicodeNameIndex # <1>
+
+CRLF = b'\r\n'
+PROMPT = b'?> '
+
+index = UnicodeNameIndex() # <2>
+
+@asyncio.coroutine
+def handle_queries(reader, writer): # <3>
+ while True: # <4>
+ writer.write(PROMPT) # can't yield from! # <5>
+ yield from writer.drain() # must yield from! # <6>
+ data = yield from reader.readline() # <7>
+ try:
+ query = data.decode().strip()
+ except UnicodeDecodeError: # <8>
+ query = '\x00'
+ client = writer.get_extra_info('peername') # <9>
+ print('Received from {}: {!r}'.format(client, query)) # <10>
+ if query:
+ if ord(query[:1]) < 32: # <11>
+ break
+ lines = list(index.find_description_strs(query)) # <12>
+ if lines:
+ writer.writelines(line.encode() + CRLF for line in lines) # <13>
+ writer.write(index.status(query, len(lines)).encode() + CRLF) # <14>
+
+ yield from writer.drain() # <15>
+ print('Sent {} results'.format(len(lines))) # <16>
+
+ print('Close the client socket') # <17>
+ writer.close() # <18>
+# END TCP_CHARFINDER_TOP
+
+# BEGIN TCP_CHARFINDER_MAIN
+def main(address='127.0.0.1', port=2323): # <1>
+ port = int(port)
+ loop = asyncio.get_event_loop()
+ server_coro = asyncio.start_server(handle_queries, address, port,
+ loop=loop) # <2>
+ server = loop.run_until_complete(server_coro) # <3>
+
+ host = server.sockets[0].getsockname() # <4>
+ print('Serving on {}. Hit CTRL-C to stop.'.format(host)) # <5>
+ try:
+ loop.run_forever() # <6>
+ except KeyboardInterrupt: # CTRL+C pressed
+ pass
+
+ print('Server shutting down.')
+ server.close() # <7>
+ loop.run_until_complete(server.wait_closed()) # <8>
+ loop.close() # <9>
+
+
+if __name__ == '__main__':
+ main(*sys.argv[1:]) # <10>
+# END TCP_CHARFINDER_MAIN
diff --git a/18-asyncio/charfinder/test_charfinder.py b/18-asyncio/charfinder/test_charfinder.py
new file mode 100644
index 0000000..a6fe729
--- /dev/null
+++ b/18-asyncio/charfinder/test_charfinder.py
@@ -0,0 +1,115 @@
+import pytest
+
+from charfinder import UnicodeNameIndex, tokenize, sample_chars, query_type
+from unicodedata import name
+
+
+@pytest.fixture
+def sample_index():
+ return UnicodeNameIndex(sample_chars)
+
+
+@pytest.fixture(scope="module")
+def full_index():
+ return UnicodeNameIndex()
+
+
+def test_query_type():
+ assert query_type('blue') == 'NAME'
+
+
+def test_tokenize():
+ assert list(tokenize('')) == []
+ assert list(tokenize('a b')) == ['A', 'B']
+ assert list(tokenize('a-b')) == ['A', 'B']
+ assert list(tokenize('abc')) == ['ABC']
+ assert list(tokenize('café')) == ['CAFÉ']
+
+
+def test_index():
+ sample_index = UnicodeNameIndex(sample_chars)
+ assert len(sample_index.index) == 9
+
+
+def test_find_word_no_match(sample_index):
+ res = sample_index.find_chars('qwertyuiop')
+ assert len(res.items) == 0
+
+
+def test_find_word_1_match(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('currency').items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN')]
+
+
+def test_find_word_1_match_character_result(sample_index):
+ res = [name(char) for char in
+ sample_index.find_chars('currency').items]
+ assert res == ['EURO-CURRENCY SIGN']
+
+
+def test_find_word_2_matches(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('Euro').items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN'),
+ (8364, 'EURO SIGN')]
+
+
+def test_find_2_words_no_matches(sample_index):
+ res = sample_index.find_chars('Euro letter')
+ assert res.count == 0
+
+
+def test_find_2_words_no_matches_because_one_not_found(sample_index):
+ res = sample_index.find_chars('letter qwertyuiop')
+ assert res.count == 0
+
+
+def test_find_2_words_1_match(sample_index):
+ res = sample_index.find_chars('sign dollar')
+ assert res.count == 1
+
+
+def test_find_2_words_2_matches(sample_index):
+ res = sample_index.find_chars('latin letter')
+ assert res.count == 2
+
+
+def test_find_chars_many_matches_full(full_index):
+ res = full_index.find_chars('letter')
+ assert res.count > 7000
+
+
+def test_find_1_word_1_match_full(full_index):
+ res = [(ord(char), name(char))
+ for char in full_index.find_chars('registered').items]
+ assert res == [(174, 'REGISTERED SIGN')]
+
+
+def test_find_1_word_2_matches_full(full_index):
+ res = full_index.find_chars('rook')
+ assert res.count == 2
+
+
+def test_find_3_words_no_matches_full(full_index):
+ res = full_index.find_chars('no such character')
+ assert res.count == 0
+
+
+def test_find_with_start(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 1).items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN'), (8364, 'EURO SIGN')]
+
+
+def test_find_with_stop(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 0, 2).items]
+ assert res == [(36, 'DOLLAR SIGN'), (8352, 'EURO-CURRENCY SIGN')]
+
+
+def test_find_with_start_stop(sample_index):
+ res = [(ord(char), name(char))
+ for char in sample_index.find_chars('sign', 1, 2).items]
+ assert res == [(8352, 'EURO-CURRENCY SIGN')]
+
diff --git a/18-asyncio/countries/README.rst b/18-asyncio/countries/README.rst
new file mode 100644
index 0000000..4945806
--- /dev/null
+++ b/18-asyncio/countries/README.rst
@@ -0,0 +1,3 @@
+The ``asyncio`` flag download examples are in the
+``../../17-futures/countries/`` directory together
+with the sequential and threadpool examples.
diff --git a/18-asyncio/spinner_asyncio.py b/18-asyncio/spinner_asyncio.py
new file mode 100755
index 0000000..aeb2b55
--- /dev/null
+++ b/18-asyncio/spinner_asyncio.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+# spinner_asyncio.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
+# BEGIN SPINNER_ASYNCIO
+import asyncio
+import itertools
+import sys
+
+
+@asyncio.coroutine # <1>
+def spin(msg): # <2>
+ write, flush = sys.stdout.write, sys.stdout.flush
+ for char in itertools.cycle('|/-\\'):
+ status = char + ' ' + msg
+ write(status)
+ flush()
+ write('\x08' * len(status))
+ try:
+ yield from asyncio.sleep(.1) # <3>
+ except asyncio.CancelledError: # <4>
+ break
+ write(' ' * len(status) + '\x08' * len(status))
+
+
+@asyncio.coroutine
+def slow_function(): # <5>
+ # pretend waiting a long time for I/O
+ yield from asyncio.sleep(3) # <6>
+ return 42
+
+
+@asyncio.coroutine
+def supervisor(): # <7>
+ spinner = asyncio.async(spin('thinking!')) # <8>
+ print('spinner object:', spinner) # <9>
+ result = yield from slow_function() # <10>
+ spinner.cancel() # <11>
+ return result
+
+
+def main():
+ loop = asyncio.get_event_loop() # <12>
+ result = loop.run_until_complete(supervisor()) # <13>
+ loop.close()
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
+# END SPINNER_ASYNCIO
diff --git a/18-asyncio/spinner_await.py b/18-asyncio/spinner_await.py
new file mode 100755
index 0000000..ad9c9b0
--- /dev/null
+++ b/18-asyncio/spinner_await.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+
+# spinner_await.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
+import asyncio
+import itertools
+import sys
+
+
+async def spin(msg): # <1>
+ write, flush = sys.stdout.write, sys.stdout.flush
+ for char in itertools.cycle('|/-\\'):
+ status = char + ' ' + msg
+ write(status)
+ flush()
+ write('\x08' * len(status))
+ try:
+ await asyncio.sleep(.1) # <2>
+ except asyncio.CancelledError: # <3>
+ break
+ write(' ' * len(status) + '\x08' * len(status))
+
+
+async def slow_function(): # <4>
+ # pretend waiting a long time for I/O
+ await asyncio.sleep(3) # <5>
+ return 42
+
+
+async def supervisor(): # <6>
+ spinner = asyncio.ensure_future(spin('thinking!')) # <7>
+ print('spinner object:', spinner) # <8>
+ result = await slow_function() # <9>
+ spinner.cancel() # <10>
+ return result
+
+
+def main():
+ loop = asyncio.get_event_loop() # <11>
+ result = loop.run_until_complete(supervisor()) # <12>
+ loop.close()
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/18-asyncio/spinner_curio.py b/18-asyncio/spinner_curio.py
new file mode 100755
index 0000000..9475b7c
--- /dev/null
+++ b/18-asyncio/spinner_curio.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+
+# spinner_curio.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
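+# curio is a third-party library, not in the standard library: pip install curio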
+import curio
+
+import itertools
+import sys
+
+
+async def spin(msg): # <1>
+ write, flush = sys.stdout.write, sys.stdout.flush
+ for char in itertools.cycle('|/-\\'):
+ status = char + ' ' + msg
+ write(status)
+ flush()
+ write('\x08' * len(status))
+ try:
+ await curio.sleep(.1) # <2>
+ except curio.CancelledError: # <3>
+ break
+ write(' ' * len(status) + '\x08' * len(status))
+
+
+async def slow_function(): # <4>
+ # pretend waiting a long time for I/O
+ await curio.sleep(3) # <5>
+ return 42
+
+
+async def supervisor(): # <6>
+ spinner = await curio.spawn(spin('thinking!')) # <7>
+ print('spinner object:\n ', repr(spinner)) # <8>
+ result = await slow_function() # <9>
+ await spinner.cancel() # <10>
+ return result
+
+
+def main():
+ result = curio.run(supervisor) # <12>
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/18-asyncio/spinner_thread.py b/18-asyncio/spinner_thread.py
new file mode 100755
index 0000000..dffcca6
--- /dev/null
+++ b/18-asyncio/spinner_thread.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+
+# spinner_thread.py
+
+# credits: Adapted from Michele Simionato's
+# multiprocessing example in the python-list:
+# https://mail.python.org/pipermail/python-list/2009-February/538048.html
+
+# BEGIN SPINNER_THREAD
+import threading
+import itertools
+import time
+import sys
+
+
+def spin(msg, done): # <2>
+ write, flush = sys.stdout.write, sys.stdout.flush
+ for char in itertools.cycle('|/-\\'): # <3>
+ status = char + ' ' + msg
+ write(status)
+ flush()
+ write('\x08' * len(status)) # <4>
+ if done.wait(.1): # <5>
+ break
+ write(' ' * len(status) + '\x08' * len(status)) # <6>
+
+
+def slow_function(): # <7>
+ # pretend waiting a long time for I/O
+ time.sleep(3) # <8>
+ return 42
+
+
+def supervisor(): # <9>
+ done = threading.Event()
+ spinner = threading.Thread(target=spin,
+ args=('thinking!', done))
+ print('spinner object:', spinner) # <10>
+ spinner.start() # <11>
+ result = slow_function() # <12>
+ done.set() # <13>
+ spinner.join() # <14>
+ return result
+
+
+def main():
+ result = supervisor() # <15>
+ print('Answer:', result)
+
+
+if __name__ == '__main__':
+ main()
+# END SPINNER_THREAD
diff --git a/19-dyn-attr-prop/README.rst b/19-dyn-attr-prop/README.rst
new file mode 100644
index 0000000..3a87af1
--- /dev/null
+++ b/19-dyn-attr-prop/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 19 - "Dynamic attributes and properties"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/metaprog/blackknight.py b/19-dyn-attr-prop/blackknight.py
similarity index 100%
rename from metaprog/blackknight.py
rename to 19-dyn-attr-prop/blackknight.py
diff --git a/descriptors/bulkfood_v1.py b/19-dyn-attr-prop/bulkfood/bulkfood_v1.py
similarity index 100%
rename from descriptors/bulkfood_v1.py
rename to 19-dyn-attr-prop/bulkfood/bulkfood_v1.py
diff --git a/descriptors/bulkfood_v2.py b/19-dyn-attr-prop/bulkfood/bulkfood_v2.py
similarity index 100%
rename from descriptors/bulkfood_v2.py
rename to 19-dyn-attr-prop/bulkfood/bulkfood_v2.py
diff --git a/descriptors/bulkfood_v2b.py b/19-dyn-attr-prop/bulkfood/bulkfood_v2b.py
similarity index 100%
rename from descriptors/bulkfood_v2b.py
rename to 19-dyn-attr-prop/bulkfood/bulkfood_v2b.py
diff --git a/descriptors/bulkfood_v2prop.py b/19-dyn-attr-prop/bulkfood/bulkfood_v2prop.py
similarity index 100%
rename from descriptors/bulkfood_v2prop.py
rename to 19-dyn-attr-prop/bulkfood/bulkfood_v2prop.py
diff --git a/metaprog/doc_property.py b/19-dyn-attr-prop/doc_property.py
similarity index 100%
rename from metaprog/doc_property.py
rename to 19-dyn-attr-prop/doc_property.py
diff --git a/metaprog/oscon-schedule/data/osconfeed.json b/19-dyn-attr-prop/oscon/data/osconfeed.json
similarity index 100%
rename from metaprog/oscon-schedule/data/osconfeed.json
rename to 19-dyn-attr-prop/oscon/data/osconfeed.json
diff --git a/metaprog/oscon-schedule/demo_schedule2.py b/19-dyn-attr-prop/oscon/demo_schedule2.py
similarity index 100%
rename from metaprog/oscon-schedule/demo_schedule2.py
rename to 19-dyn-attr-prop/oscon/demo_schedule2.py
diff --git a/metaprog/oscon-schedule/explore1.py b/19-dyn-attr-prop/oscon/explore0.py
similarity index 86%
rename from metaprog/oscon-schedule/explore1.py
rename to 19-dyn-attr-prop/oscon/explore0.py
index 49a95f8..9dc589d 100644
--- a/metaprog/oscon-schedule/explore1.py
+++ b/19-dyn-attr-prop/oscon/explore0.py
@@ -1,7 +1,7 @@
"""
-explore1.py: Script to explore the OSCON schedule feed
+explore0.py: Script to explore the OSCON schedule feed
-# BEGIN EXPLORE1_DEMO
+# BEGIN EXPLORE0_DEMO
>>> from osconfeed import load
>>> raw_feed = load()
>>> feed = FrozenJSON(raw_feed) # <1>
@@ -18,7 +18,9 @@
53 venues
>>> feed.Schedule.speakers[-1].name # <5>
'Carina C. Zona'
- >>> talk = feed.Schedule.events[40] # <6>
+ >>> talk = feed.Schedule.events[40]
+ >>> type(talk) # <6>
+ <class 'explore0.FrozenJSON'>
>>> talk.name
'There *Will* Be Bugs'
>>> talk.speakers # <7>
@@ -28,10 +30,11 @@
...
KeyError: 'flavor'
-# END EXPLORE1_DEMO
+# END EXPLORE0_DEMO
+
"""
-# BEGIN EXPLORE1
+# BEGIN EXPLORE0
from collections import abc
@@ -57,4 +60,4 @@ def build(cls, obj): # <5>
return [cls.build(item) for item in obj]
else: # <8>
return obj
-# END EXPLORE1
+# END EXPLORE0
diff --git a/19-dyn-attr-prop/oscon/explore1.py b/19-dyn-attr-prop/oscon/explore1.py
new file mode 100644
index 0000000..5ef0968
--- /dev/null
+++ b/19-dyn-attr-prop/oscon/explore1.py
@@ -0,0 +1,78 @@
+"""
+explore1.py: Script to explore the OSCON schedule feed
+
+ >>> from osconfeed import load
+ >>> raw_feed = load()
+ >>> feed = FrozenJSON(raw_feed)
+ >>> len(feed.Schedule.speakers)
+ 357
+ >>> sorted(feed.Schedule.keys())
+ ['conferences', 'events', 'speakers', 'venues']
+ >>> for key, value in sorted(feed.Schedule.items()):
+ ... print('{:3} {}'.format(len(value), key))
+ ...
+ 1 conferences
+ 484 events
+ 357 speakers
+ 53 venues
+ >>> feed.Schedule.speakers[-1].name
+ 'Carina C. Zona'
+ >>> talk = feed.Schedule.events[40]
+ >>> type(talk)
+ <class 'explore1.FrozenJSON'>
+ >>> talk.name
+ 'There *Will* Be Bugs'
+ >>> talk.speakers
+ [3471, 5199]
+ >>> talk.flavor
+ Traceback (most recent call last):
+ ...
+ KeyError: 'flavor'
+
+Handle keywords by appending a `_`.
+
+# BEGIN EXPLORE1_DEMO
+
+ >>> grad = FrozenJSON({'name': 'Jim Bo', 'class': 1982})
+ >>> grad.name
+ 'Jim Bo'
+ >>> grad.class_
+ 1982
+
+# END EXPLORE1_DEMO
+
+"""
+
+from collections import abc
+import keyword
+
+
+class FrozenJSON:
+ """A read-only façade for navigating a JSON-like object
+ using attribute notation
+ """
+
+# BEGIN EXPLORE1
+ def __init__(self, mapping):
+ self.__data = {}
+ for key, value in mapping.items():
+ if keyword.iskeyword(key): # <1>
+ key += '_'
+ self.__data[key] = value
+# END EXPLORE1
+
+ def __getattr__(self, name):
+ if hasattr(self.__data, name):
+ return getattr(self.__data, name)
+ else:
+ return FrozenJSON.build(self.__data[name])
+
+ @classmethod
+ def build(cls, obj):
+ if isinstance(obj, abc.Mapping):
+ return cls(obj)
+ elif isinstance(obj, abc.MutableSequence):
+ return [cls.build(item) for item in obj]
+ else: # <8>
+ return obj
+
diff --git a/metaprog/oscon-schedule/explore2.py b/19-dyn-attr-prop/oscon/explore2.py
similarity index 88%
rename from metaprog/oscon-schedule/explore2.py
rename to 19-dyn-attr-prop/oscon/explore2.py
index 19040d2..8d0088c 100644
--- a/metaprog/oscon-schedule/explore2.py
+++ b/19-dyn-attr-prop/oscon/explore2.py
@@ -40,7 +40,11 @@ def __new__(cls, arg): # <1>
return arg
def __init__(self, mapping):
- self.__data = dict(mapping)
+ self.__data = {}
+ for key, value in mapping.items():
+ if iskeyword(key):
+ key += '_'
+ self.__data[key] = value
def __getattr__(self, name):
if hasattr(self.__data, name):
diff --git a/metaprog/oscon-schedule/osconfeed-sample.json b/19-dyn-attr-prop/oscon/osconfeed-sample.json
similarity index 100%
rename from metaprog/oscon-schedule/osconfeed-sample.json
rename to 19-dyn-attr-prop/oscon/osconfeed-sample.json
diff --git a/metaprog/oscon-schedule/osconfeed.py b/19-dyn-attr-prop/oscon/osconfeed.py
similarity index 100%
rename from metaprog/oscon-schedule/osconfeed.py
rename to 19-dyn-attr-prop/oscon/osconfeed.py
diff --git a/metaprog/oscon-schedule/schedule1.py b/19-dyn-attr-prop/oscon/schedule1.py
similarity index 100%
rename from metaprog/oscon-schedule/schedule1.py
rename to 19-dyn-attr-prop/oscon/schedule1.py
diff --git a/metaprog/oscon-schedule/schedule2.py b/19-dyn-attr-prop/oscon/schedule2.py
similarity index 100%
rename from metaprog/oscon-schedule/schedule2.py
rename to 19-dyn-attr-prop/oscon/schedule2.py
diff --git a/metaprog/oscon-schedule/test_schedule1.py b/19-dyn-attr-prop/oscon/test_schedule1.py
similarity index 83%
rename from metaprog/oscon-schedule/test_schedule1.py
rename to 19-dyn-attr-prop/oscon/test_schedule1.py
index dbaacc9..ba5dfd1 100644
--- a/metaprog/oscon-schedule/test_schedule1.py
+++ b/19-dyn-attr-prop/oscon/test_schedule1.py
@@ -1,15 +1,20 @@
+import os
import shelve
+
import pytest
import schedule1 as schedule
+DB_NAME = 'data/test_db'
+
-@pytest.yield_fixture
+@pytest.fixture(scope='module')
def db():
- with shelve.open(schedule.DB_NAME) as the_db:
+ with shelve.open(DB_NAME) as the_db:
if schedule.CONFERENCE not in the_db:
schedule.load_db(the_db)
yield the_db
+ os.remove(DB_NAME)
def test_record_class():
diff --git a/metaprog/oscon-schedule/test_schedule2.py b/19-dyn-attr-prop/oscon/test_schedule2.py
similarity index 93%
rename from metaprog/oscon-schedule/test_schedule2.py
rename to 19-dyn-attr-prop/oscon/test_schedule2.py
index de09d32..ab1c79c 100644
--- a/metaprog/oscon-schedule/test_schedule2.py
+++ b/19-dyn-attr-prop/oscon/test_schedule2.py
@@ -1,15 +1,18 @@
+import os
import shelve
+
import pytest
 import schedule2 as schedule
+DB_NAME = 'data/test_db'
+
-@pytest.yield_fixture
+@pytest.fixture(scope='module')
def db():
- with shelve.open(schedule.DB_NAME) as the_db:
+ with shelve.open(DB_NAME) as the_db:
if schedule.CONFERENCE not in the_db:
schedule.load_db(the_db)
yield the_db
+ os.remove(DB_NAME)
def test_record_attr_access():
diff --git a/metaprog/pseudo_construction.py b/19-dyn-attr-prop/pseudo_construction.py
similarity index 100%
rename from metaprog/pseudo_construction.py
rename to 19-dyn-attr-prop/pseudo_construction.py
diff --git a/20-descriptor/README.rst b/20-descriptor/README.rst
new file mode 100644
index 0000000..d0cad28
--- /dev/null
+++ b/20-descriptor/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 20 - "Attribute descriptors"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/descriptors/bulkfood_v3.py b/20-descriptor/bulkfood/bulkfood_v3.py
similarity index 100%
rename from descriptors/bulkfood_v3.py
rename to 20-descriptor/bulkfood/bulkfood_v3.py
diff --git a/descriptors/bulkfood_v4.py b/20-descriptor/bulkfood/bulkfood_v4.py
similarity index 89%
rename from descriptors/bulkfood_v4.py
rename to 20-descriptor/bulkfood/bulkfood_v4.py
index 864908f..ca52b60 100644
--- a/descriptors/bulkfood_v4.py
+++ b/20-descriptor/bulkfood/bulkfood_v4.py
@@ -29,11 +29,11 @@
>>> raisins = LineItem('Golden raisins', 10, 6.95)
>>> dir(raisins) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
- ['_Quantity:0', '_Quantity:1', '__class__', ...
+ ['_Quantity#0', '_Quantity#1', '__class__', ...
'description', 'price', 'subtotal', 'weight']
- >>> getattr(raisins, '_Quantity:0')
+ >>> getattr(raisins, '_Quantity#0')
10
- >>> getattr(raisins, '_Quantity:1')
+ >>> getattr(raisins, '_Quantity#1')
6.95
"""
@@ -47,7 +47,7 @@ def __init__(self):
cls = self.__class__ # <2>
prefix = cls.__name__
index = cls.__counter
- self.storage_name = '_{}:{}'.format(prefix, index) # <3>
+ self.storage_name = '_{}#{}'.format(prefix, index) # <3>
cls.__counter += 1 # <4>
def __get__(self, instance, owner): # <5>
diff --git a/descriptors/bulkfood_v4b.py b/20-descriptor/bulkfood/bulkfood_v4b.py
similarity index 85%
rename from descriptors/bulkfood_v4b.py
rename to 20-descriptor/bulkfood/bulkfood_v4b.py
index e97ba6b..86bc413 100644
--- a/descriptors/bulkfood_v4b.py
+++ b/20-descriptor/bulkfood/bulkfood_v4b.py
@@ -29,21 +29,20 @@
>>> raisins = LineItem('Golden raisins', 10, 6.95)
>>> dir(raisins) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
- ['_Quantity:0', '_Quantity:1', '__class__', ...
+ ['_Quantity#0', '_Quantity#1', '__class__', ...
'description', 'price', 'subtotal', 'weight']
- >>> getattr(raisins, '_Quantity:0')
+ >>> getattr(raisins, '_Quantity#0')
10
- >>> getattr(raisins, '_Quantity:1')
+ >>> getattr(raisins, '_Quantity#1')
6.95
If the descriptor is accessed in the class, the descriptor object is
returned:
- >>> LineItem.price # doctest: +ELLIPSIS
+ >>> LineItem.weight # doctest: +ELLIPSIS
- >>> br_nuts = LineItem('Brazil nuts', 10, 34.95)
- >>> br_nuts.price
- 34.95
+ >>> LineItem.weight.storage_name
+ '_Quantity#0'
"""
@@ -56,7 +55,7 @@ def __init__(self):
cls = self.__class__
prefix = cls.__name__
index = cls.__counter
- self.storage_name = '_{}:{}'.format(prefix, index)
+ self.storage_name = '_{}#{}'.format(prefix, index)
cls.__counter += 1
def __get__(self, instance, owner):
diff --git a/descriptors/bulkfood_v4c.py b/20-descriptor/bulkfood/bulkfood_v4c.py
similarity index 84%
rename from descriptors/bulkfood_v4c.py
rename to 20-descriptor/bulkfood/bulkfood_v4c.py
index 6d1df5d..a3f37b8 100644
--- a/descriptors/bulkfood_v4c.py
+++ b/20-descriptor/bulkfood/bulkfood_v4c.py
@@ -29,21 +29,21 @@
>>> raisins = LineItem('Golden raisins', 10, 6.95)
>>> dir(raisins) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
- ['_Quantity:0', '_Quantity:1', '__class__', ...
+ ['_Quantity#0', '_Quantity#1', '__class__', ...
'description', 'price', 'subtotal', 'weight']
- >>> getattr(raisins, '_Quantity:0')
+ >>> getattr(raisins, '_Quantity#0')
10
- >>> getattr(raisins, '_Quantity:1')
+ >>> getattr(raisins, '_Quantity#1')
6.95
If the descriptor is accessed in the class, the descriptor object is
returned:
- >>> LineItem.price # doctest: +ELLIPSIS
+ >>> LineItem.weight # doctest: +ELLIPSIS
- >>> br_nuts = LineItem('Brazil nuts', 10, 34.95)
- >>> br_nuts.price
- 34.95
+ >>> LineItem.weight.storage_name
+ '_Quantity#0'
+
"""
diff --git a/descriptors/bulkfood_v4prop.py b/20-descriptor/bulkfood/bulkfood_v4prop.py
similarity index 100%
rename from descriptors/bulkfood_v4prop.py
rename to 20-descriptor/bulkfood/bulkfood_v4prop.py
diff --git a/descriptors/bulkfood_v5.py b/20-descriptor/bulkfood/bulkfood_v5.py
similarity index 79%
rename from descriptors/bulkfood_v5.py
rename to 20-descriptor/bulkfood/bulkfood_v5.py
index 523e98f..49c6f68 100644
--- a/descriptors/bulkfood_v5.py
+++ b/20-descriptor/bulkfood/bulkfood_v5.py
@@ -29,29 +29,33 @@
>>> raisins = LineItem('Golden raisins', 10, 6.95)
>>> dir(raisins) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
- ['_NonBlank:0', '_Quantity:0', '_Quantity:1', '__class__', ...
+ ['_NonBlank#0', '_Quantity#0', '_Quantity#1', '__class__', ...
'description', 'price', 'subtotal', 'weight']
- >>> getattr(raisins, '_Quantity:0')
+ >>> getattr(raisins, '_Quantity#0')
10
- >>> getattr(raisins, '_NonBlank:0')
+ >>> getattr(raisins, '_NonBlank#0')
'Golden raisins'
If the descriptor is accessed in the class, the descriptor object is
returned:
- >>> LineItem.price # doctest: +ELLIPSIS
+ >>> LineItem.weight # doctest: +ELLIPSIS
- >>> br_nuts = LineItem('Brazil nuts', 10, 34.95)
- >>> br_nuts.price
- 34.95
+ >>> LineItem.weight.storage_name
+ '_Quantity#0'
The `NonBlank` descriptor prevents empty or blank strings to be used
for the description:
+ >>> br_nuts = LineItem('Brazil Nuts', 10, 34.95)
>>> br_nuts.description = ' '
Traceback (most recent call last):
...
ValueError: value cannot be empty or blank
+ >>> void = LineItem('', 1, 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
"""
diff --git a/descriptors/bulkfood_v6.py b/20-descriptor/bulkfood/bulkfood_v5_check.py
similarity index 79%
rename from descriptors/bulkfood_v6.py
rename to 20-descriptor/bulkfood/bulkfood_v5_check.py
index 04587ed..b402471 100644
--- a/descriptors/bulkfood_v6.py
+++ b/20-descriptor/bulkfood/bulkfood_v5_check.py
@@ -29,33 +29,37 @@
>>> raisins = LineItem('Golden raisins', 10, 6.95)
>>> dir(raisins) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
- ['_Check:0', '_Check:1', '_Check:2', '__class__', ...
+ ['_Check#0', '_Check#1', '_Check#2', '__class__', ...
'description', 'price', 'subtotal', 'weight']
- >>> [getattr(raisins, name) for name in dir(raisins) if name.startswith('_Check:')]
+ >>> [getattr(raisins, name) for name in dir(raisins) if name.startswith('_Check#')]
['Golden raisins', 10, 6.95]
If the descriptor is accessed in the class, the descriptor object is
returned:
- >>> LineItem.price # doctest: +ELLIPSIS
- <model_v6.Check object at 0x...>
- >>> br_nuts = LineItem('Brazil nuts', 10, 34.95)
- >>> br_nuts.price
- 34.95
+ >>> LineItem.weight # doctest: +ELLIPSIS
+ <model_v5_check.Check object at 0x...>
+ >>> LineItem.weight.storage_name
+ '_Check#1'
The `NonBlank` descriptor prevents empty or blank strings to be used
for the description:
+ >>> br_nuts = LineItem('Brazil Nuts', 10, 34.95)
>>> br_nuts.description = ' '
Traceback (most recent call last):
...
ValueError: ' ' is not valid.
+ >>> void = LineItem('', 1, 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: '' is not valid.
+
"""
-# BEGIN LINEITEM_V5
-import model_v6 as model # <1>
+import model_v5_check as model
def gt_zero(x):
'''value must be > 0'''
@@ -67,7 +71,7 @@ def non_blank(txt):
class LineItem:
- description = model.Check(non_blank) # <2>
+ description = model.Check(non_blank)
weight = model.Check(gt_zero)
price = model.Check(gt_zero)
@@ -78,4 +82,4 @@ def __init__(self, description, weight, price):
def subtotal(self):
return self.weight * self.price
-# END LINEITEM_V5
+
diff --git a/descriptors/model_v4c.py b/20-descriptor/bulkfood/model_v4c.py
similarity index 90%
rename from descriptors/model_v4c.py
rename to 20-descriptor/bulkfood/model_v4c.py
index e17a8ff..5d1c8bc 100644
--- a/descriptors/model_v4c.py
+++ b/20-descriptor/bulkfood/model_v4c.py
@@ -6,7 +6,7 @@ def __init__(self):
cls = self.__class__
prefix = cls.__name__
index = cls.__counter
- self.storage_name = '_{}:{}'.format(prefix, index)
+ self.storage_name = '_{}#{}'.format(prefix, index)
cls.__counter += 1
def __get__(self, instance, owner):
diff --git a/descriptors/model_v5.py b/20-descriptor/bulkfood/model_v5.py
similarity index 95%
rename from descriptors/model_v5.py
rename to 20-descriptor/bulkfood/model_v5.py
index da6b63c..4ce47dd 100644
--- a/descriptors/model_v5.py
+++ b/20-descriptor/bulkfood/model_v5.py
@@ -9,7 +9,7 @@ def __init__(self):
cls = self.__class__
prefix = cls.__name__
index = cls.__counter
- self.storage_name = '_{}:{}'.format(prefix, index)
+ self.storage_name = '_{}#{}'.format(prefix, index)
cls.__counter += 1
def __get__(self, instance, owner):
diff --git a/descriptors/model_v6.py b/20-descriptor/bulkfood/model_v5_check.py
similarity index 58%
rename from descriptors/model_v6.py
rename to 20-descriptor/bulkfood/model_v5_check.py
index d0623af..7e2a293 100644
--- a/descriptors/model_v6.py
+++ b/20-descriptor/bulkfood/model_v5_check.py
@@ -1,15 +1,14 @@
-# BEGIN MODEL_V5
import abc
-class AutoStorage: # <1>
+class AutoStorage:
__counter = 0
def __init__(self):
cls = self.__class__
prefix = cls.__name__
index = cls.__counter
- self.storage_name = '_{}:{}'.format(prefix, index)
+ self.storage_name = '_{}#{}'.format(prefix, index)
cls.__counter += 1
def __get__(self, instance, owner):
@@ -19,17 +18,17 @@ def __get__(self, instance, owner):
return getattr(instance, self.storage_name)
def __set__(self, instance, value):
- setattr(instance, self.storage_name, value) # <2>
+ setattr(instance, self.storage_name, value)
-class Validated(abc.ABC, AutoStorage): # <3>
+class Validated(abc.ABC, AutoStorage):
def __set__(self, instance, value):
- value = self.validate(instance, value) # <4>
- super().__set__(instance, value) # <5>
+ value = self.validate(instance, value)
+ super().__set__(instance, value)
@abc.abstractmethod
- def validate(self, instance, value): # <6>
+ def validate(self, instance, value):
"""return validated value or raise ValueError"""
INVALID = object()
@@ -51,22 +50,3 @@ def validate(self, instance, value):
if result is INVALID:
raise ValueError(self.message.format(value))
return result
-
-
-class Quantity(Validated): # <7>
-
- def validate(self, instance, value):
- if value <= 0:
- raise ValueError('value must be > 0')
- return value
-
-
-class NonBlank(Validated):
-
- def validate(self, instance, value):
- value = value.strip()
- if len(value) == 0:
- raise ValueError('value cannot be empty or blank')
- return value # <8>
-
-# END MODEL_V5
diff --git a/descriptors/descriptorkinds.py b/20-descriptor/descriptorkinds.py
similarity index 100%
rename from descriptors/descriptorkinds.py
rename to 20-descriptor/descriptorkinds.py
diff --git a/descriptors/descriptorkinds_dump.py b/20-descriptor/descriptorkinds_dump.py
similarity index 100%
rename from descriptors/descriptorkinds_dump.py
rename to 20-descriptor/descriptorkinds_dump.py
diff --git a/descriptors/method_is_descriptor.py b/20-descriptor/method_is_descriptor.py
similarity index 100%
rename from descriptors/method_is_descriptor.py
rename to 20-descriptor/method_is_descriptor.py
diff --git a/21-class-metaprog/README.rst b/21-class-metaprog/README.rst
new file mode 100644
index 0000000..d2e20d8
--- /dev/null
+++ b/21-class-metaprog/README.rst
@@ -0,0 +1,4 @@
+Sample code for Chapter 21 - "Class metaprogramming"
+
+From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
+http://shop.oreilly.com/product/0636920032519.do
diff --git a/21-class-metaprog/bulkfood/bulkfood_v6.py b/21-class-metaprog/bulkfood/bulkfood_v6.py
new file mode 100644
index 0000000..dd24ba9
--- /dev/null
+++ b/21-class-metaprog/bulkfood/bulkfood_v6.py
@@ -0,0 +1,84 @@
+"""
+
+A line item for a bulk food order has description, weight and price fields::
+
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> raisins.weight, raisins.description, raisins.price
+ (10, 'Golden raisins', 6.95)
+
+A ``subtotal`` method gives the total price for that line item::
+
+ >>> raisins.subtotal()
+ 69.5
+
+The weight of a ``LineItem`` must be greater than 0::
+
+ >>> raisins.weight = -20
+ Traceback (most recent call last):
+ ...
+ ValueError: value must be > 0
+
+No change was made::
+
+ >>> raisins.weight
+ 10
+
+The values of the attributes managed by the descriptors are stored in
+alternate attributes, created by the descriptors in each ``LineItem``
+instance::
+
+# BEGIN LINEITEM_V6_DEMO
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> dir(raisins)[:3]
+ ['_NonBlank#description', '_Quantity#price', '_Quantity#weight']
+ >>> LineItem.description.storage_name
+ '_NonBlank#description'
+ >>> raisins.description
+ 'Golden raisins'
+ >>> getattr(raisins, '_NonBlank#description')
+ 'Golden raisins'
+
+# END LINEITEM_V6_DEMO
+
+If the descriptor is accessed in the class, the descriptor object is
+returned:
+
+ >>> LineItem.weight # doctest: +ELLIPSIS
+    <model_v6.Quantity object at 0x...>
+ >>> LineItem.weight.storage_name
+ '_Quantity#weight'
+
+
+The `NonBlank` descriptor prevents empty or blank strings from being used
+for the description:
+
+ >>> br_nuts = LineItem('Brazil Nuts', 10, 34.95)
+ >>> br_nuts.description = ' '
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+ >>> void = LineItem('', 1, 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+
+
+"""
+
+# BEGIN LINEITEM_V6
+import model_v6 as model
+
+@model.entity # <1>
+class LineItem:
+ description = model.NonBlank()
+ weight = model.Quantity()
+ price = model.Quantity()
+
+ def __init__(self, description, weight, price):
+ self.description = description
+ self.weight = weight
+ self.price = price
+
+ def subtotal(self):
+ return self.weight * self.price
+# END LINEITEM_V6
diff --git a/21-class-metaprog/bulkfood/bulkfood_v7.py b/21-class-metaprog/bulkfood/bulkfood_v7.py
new file mode 100644
index 0000000..3d4d578
--- /dev/null
+++ b/21-class-metaprog/bulkfood/bulkfood_v7.py
@@ -0,0 +1,79 @@
+"""
+
+A line item for a bulk food order has description, weight and price fields::
+
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> raisins.weight, raisins.description, raisins.price
+ (10, 'Golden raisins', 6.95)
+
+A ``subtotal`` method gives the total price for that line item::
+
+ >>> raisins.subtotal()
+ 69.5
+
+The weight of a ``LineItem`` must be greater than 0::
+
+ >>> raisins.weight = -20
+ Traceback (most recent call last):
+ ...
+ ValueError: value must be > 0
+
+No change was made::
+
+ >>> raisins.weight
+ 10
+
+The values of the attributes managed by the descriptors are stored in
+alternate attributes, created by the descriptors in each ``LineItem``
+instance::
+
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> dir(raisins)[:3]
+ ['_NonBlank#description', '_Quantity#price', '_Quantity#weight']
+ >>> LineItem.description.storage_name
+ '_NonBlank#description'
+ >>> raisins.description
+ 'Golden raisins'
+ >>> getattr(raisins, '_NonBlank#description')
+ 'Golden raisins'
+
+If the descriptor is accessed in the class, the descriptor object is
+returned:
+
+ >>> LineItem.weight # doctest: +ELLIPSIS
+    <model_v7.Quantity object at 0x...>
+ >>> LineItem.weight.storage_name
+ '_Quantity#weight'
+
+
+The `NonBlank` descriptor prevents empty or blank strings from being used
+for the description:
+
+ >>> br_nuts = LineItem('Brazil Nuts', 10, 34.95)
+ >>> br_nuts.description = ' '
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+ >>> void = LineItem('', 1, 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+
+"""
+
+# BEGIN LINEITEM_V7
+import model_v7 as model
+
+class LineItem(model.Entity): # <1>
+ description = model.NonBlank()
+ weight = model.Quantity()
+ price = model.Quantity()
+
+ def __init__(self, description, weight, price):
+ self.description = description
+ self.weight = weight
+ self.price = price
+
+ def subtotal(self):
+ return self.weight * self.price
+# END LINEITEM_V7
diff --git a/21-class-metaprog/bulkfood/bulkfood_v8.py b/21-class-metaprog/bulkfood/bulkfood_v8.py
new file mode 100644
index 0000000..2595b64
--- /dev/null
+++ b/21-class-metaprog/bulkfood/bulkfood_v8.py
@@ -0,0 +1,86 @@
+"""
+
+A line item for a bulk food order has description, weight and price fields::
+
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> raisins.weight, raisins.description, raisins.price
+ (10, 'Golden raisins', 6.95)
+
+A ``subtotal`` method gives the total price for that line item::
+
+ >>> raisins.subtotal()
+ 69.5
+
+The weight of a ``LineItem`` must be greater than 0::
+
+ >>> raisins.weight = -20
+ Traceback (most recent call last):
+ ...
+ ValueError: value must be > 0
+
+No change was made::
+
+ >>> raisins.weight
+ 10
+
+ >>> raisins = LineItem('Golden raisins', 10, 6.95)
+ >>> dir(raisins)[:3]
+ ['_NonBlank#description', '_Quantity#price', '_Quantity#weight']
+ >>> LineItem.description.storage_name
+ '_NonBlank#description'
+ >>> raisins.description
+ 'Golden raisins'
+ >>> getattr(raisins, '_NonBlank#description')
+ 'Golden raisins'
+
+If the descriptor is accessed in the class, the descriptor object is
+returned:
+
+ >>> LineItem.weight # doctest: +ELLIPSIS
+    <model_v8.Quantity object at 0x...>
+ >>> LineItem.weight.storage_name
+ '_Quantity#weight'
+
+
+The `NonBlank` descriptor prevents empty or blank strings from being used
+for the description:
+
+ >>> br_nuts = LineItem('Brazil Nuts', 10, 34.95)
+ >>> br_nuts.description = ' '
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+ >>> void = LineItem('', 1, 1)
+ Traceback (most recent call last):
+ ...
+ ValueError: value cannot be empty or blank
+
+
+Fields can be retrieved in the order they were declared:
+
+# BEGIN LINEITEM_V8_DEMO
+ >>> for name in LineItem.field_names():
+ ... print(name)
+ ...
+ description
+ weight
+ price
+
+# END LINEITEM_V8_DEMO
+
+"""
+
+import model_v8 as model
+
+class LineItem(model.Entity):
+ description = model.NonBlank()
+ weight = model.Quantity()
+ price = model.Quantity()
+
+ def __init__(self, description, weight, price):
+ self.description = description
+ self.weight = weight
+ self.price = price
+
+ def subtotal(self):
+ return self.weight * self.price
diff --git a/21-class-metaprog/bulkfood/model_v6.py b/21-class-metaprog/bulkfood/model_v6.py
new file mode 100644
index 0000000..29c6c44
--- /dev/null
+++ b/21-class-metaprog/bulkfood/model_v6.py
@@ -0,0 +1,60 @@
+import abc
+
+
+class AutoStorage:
+ __counter = 0
+
+ def __init__(self):
+ cls = self.__class__
+ prefix = cls.__name__
+ index = cls.__counter
+ self.storage_name = '_{}#{}'.format(prefix, index)
+ cls.__counter += 1
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ return self
+ else:
+ return getattr(instance, self.storage_name)
+
+ def __set__(self, instance, value):
+ setattr(instance, self.storage_name, value)
+
+
+class Validated(abc.ABC, AutoStorage):
+
+ def __set__(self, instance, value):
+ value = self.validate(instance, value)
+ super().__set__(instance, value)
+
+ @abc.abstractmethod
+ def validate(self, instance, value):
+ """return validated value or raise ValueError"""
+
+
+class Quantity(Validated):
+ """a number greater than zero"""
+
+ def validate(self, instance, value):
+ if value <= 0:
+ raise ValueError('value must be > 0')
+ return value
+
+
+class NonBlank(Validated):
+ """a string with at least one non-space character"""
+
+ def validate(self, instance, value):
+ value = value.strip()
+ if len(value) == 0:
+ raise ValueError('value cannot be empty or blank')
+ return value
+
+# BEGIN MODEL_V6
+def entity(cls): # <1>
+ for key, attr in cls.__dict__.items(): # <2>
+ if isinstance(attr, Validated): # <3>
+ type_name = type(attr).__name__
+ attr.storage_name = '_{}#{}'.format(type_name, key) # <4>
+ return cls # <5>
+# END MODEL_V6
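
The ``entity`` decorator above is applied once per class and rewrites each
``Validated`` descriptor's ``storage_name`` from the generic counter-based form
(``_Quantity#0``) to a readable, attribute-based form (``_Quantity#weight``).
A minimal standalone sketch of the same idea, using a hypothetical ``Positive``
descriptor that is not part of the book's code::

    class Positive:                      # simplified stand-in for model_v6.Quantity
        def __init__(self):
            self.storage_name = None     # filled in by the entity decorator

        def __set__(self, instance, value):
            if value <= 0:
                raise ValueError('value must be > 0')
            instance.__dict__[self.storage_name] = value

        def __get__(self, instance, owner):
            if instance is None:
                return self
            return instance.__dict__[self.storage_name]

    def entity(cls):
        # give each Positive descriptor a readable storage name: _Positive#<attribute>
        for key, attr in cls.__dict__.items():
            if isinstance(attr, Positive):
                attr.storage_name = '_{}#{}'.format(type(attr).__name__, key)
        return cls

    @entity
    class Box:
        weight = Positive()

    box = Box()
    box.weight = 3
    print(Box.weight.storage_name)   # -> '_Positive#weight'
    print(box.__dict__)              # -> {'_Positive#weight': 3}
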
diff --git a/21-class-metaprog/bulkfood/model_v7.py b/21-class-metaprog/bulkfood/model_v7.py
new file mode 100644
index 0000000..310eedc
--- /dev/null
+++ b/21-class-metaprog/bulkfood/model_v7.py
@@ -0,0 +1,66 @@
+import abc
+
+
+class AutoStorage:
+ __counter = 0
+
+ def __init__(self):
+ cls = self.__class__
+ prefix = cls.__name__
+ index = cls.__counter
+ self.storage_name = '_{}#{}'.format(prefix, index)
+ cls.__counter += 1
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ return self
+ else:
+ return getattr(instance, self.storage_name)
+
+ def __set__(self, instance, value):
+ setattr(instance, self.storage_name, value)
+
+
+class Validated(abc.ABC, AutoStorage):
+
+ def __set__(self, instance, value):
+ value = self.validate(instance, value)
+ super().__set__(instance, value)
+
+ @abc.abstractmethod
+ def validate(self, instance, value):
+ """return validated value or raise ValueError"""
+
+
+class Quantity(Validated):
+ """a number greater than zero"""
+
+ def validate(self, instance, value):
+ if value <= 0:
+ raise ValueError('value must be > 0')
+ return value
+
+
+class NonBlank(Validated):
+ """a string with at least one non-space character"""
+
+ def validate(self, instance, value):
+ value = value.strip()
+ if len(value) == 0:
+ raise ValueError('value cannot be empty or blank')
+ return value
+
+# BEGIN MODEL_V7
+class EntityMeta(type):
+ """Metaclass for business entities with validated fields"""
+
+ def __init__(cls, name, bases, attr_dict):
+ super().__init__(name, bases, attr_dict) # <1>
+ for key, attr in attr_dict.items(): # <2>
+ if isinstance(attr, Validated):
+ type_name = type(attr).__name__
+ attr.storage_name = '_{}#{}'.format(type_name, key)
+
+class Entity(metaclass=EntityMeta): # <3>
+ """Business entity with validated fields"""
+# END MODEL_V7
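
Compared with the ``entity`` decorator in ``model_v6.py``, the metaclass version
needs no per-class decoration: the storage-name fix-up in ``EntityMeta.__init__``
runs automatically whenever a subclass of ``model.Entity`` is created. A small
usage sketch (the ``Coupon`` class is hypothetical, and the snippet assumes
``model_v7.py`` is importable from the current directory)::

    import model_v7 as model

    class Coupon(model.Entity):         # no decorator needed
        code = model.NonBlank()         # renamed by EntityMeta when the class is built

    print(Coupon.code.storage_name)     # -> '_NonBlank#code'
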
diff --git a/21-class-metaprog/bulkfood/model_v8.py b/21-class-metaprog/bulkfood/model_v8.py
new file mode 100644
index 0000000..675c8df
--- /dev/null
+++ b/21-class-metaprog/bulkfood/model_v8.py
@@ -0,0 +1,80 @@
+import abc
+import collections
+
+
+class AutoStorage:
+ __counter = 0
+
+ def __init__(self):
+ cls = self.__class__
+ prefix = cls.__name__
+ index = cls.__counter
+ self.storage_name = '_{}#{}'.format(prefix, index)
+ cls.__counter += 1
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ return self
+ else:
+ return getattr(instance, self.storage_name)
+
+ def __set__(self, instance, value):
+ setattr(instance, self.storage_name, value)
+
+
+class Validated(abc.ABC, AutoStorage):
+
+ def __set__(self, instance, value):
+ value = self.validate(instance, value)
+ super().__set__(instance, value)
+
+ @abc.abstractmethod
+ def validate(self, instance, value):
+ """return validated value or raise ValueError"""
+
+
+class Quantity(Validated):
+ """a number greater than zero"""
+
+ def validate(self, instance, value):
+ if value <= 0:
+ raise ValueError('value must be > 0')
+ return value
+
+
+class NonBlank(Validated):
+ """a string with at least one non-space character"""
+
+ def validate(self, instance, value):
+ value = value.strip()
+ if len(value) == 0:
+ raise ValueError('value cannot be empty or blank')
+ return value
+
+# BEGIN MODEL_V8
+class EntityMeta(type):
+ """Metaclass for business entities with validated fields"""
+
+ @classmethod
+ def __prepare__(cls, name, bases):
+ return collections.OrderedDict() # <1>
+
+ def __init__(cls, name, bases, attr_dict):
+ super().__init__(name, bases, attr_dict)
+ cls._field_names = [] # <2>
+ for key, attr in attr_dict.items(): # <3>
+ if isinstance(attr, Validated):
+ type_name = type(attr).__name__
+ attr.storage_name = '_{}#{}'.format(type_name, key)
+ cls._field_names.append(key) # <4>
+
+
+class Entity(metaclass=EntityMeta):
+ """Business entity with validated fields"""
+
+ @classmethod
+ def field_names(cls): # <5>
+ for name in cls._field_names:
+ yield name
+
+# END MODEL_V8
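
``__prepare__`` is what makes ``field_names()`` possible here: by returning an
``OrderedDict``, the metaclass sees the class-body attributes in declaration
order. (On Python 3.6 and later the default class namespace already preserves
definition order, but the first-edition examples target Python 3.4.) A minimal
standalone sketch of the same mechanism, with hypothetical names that are not
part of the book's code::

    import collections

    class OrderedMeta(type):
        @classmethod
        def __prepare__(mcs, name, bases, **kwargs):
            return collections.OrderedDict()   # namespace used while the class body runs

        def __new__(mcs, name, bases, namespace, **kwargs):
            cls = super().__new__(mcs, name, bases, dict(namespace))
            # record user-defined attributes in the order they appeared
            cls._declared = tuple(k for k in namespace if not k.startswith('__'))
            return cls

    class Point(metaclass=OrderedMeta):
        x = 0
        y = 0
        z = 0

    print(Point._declared)   # -> ('x', 'y', 'z')
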
diff --git a/21-class-metaprog/evalsupport.py b/21-class-metaprog/evalsupport.py
new file mode 100644
index 0000000..51bb890
--- /dev/null
+++ b/21-class-metaprog/evalsupport.py
@@ -0,0 +1,25 @@
+print('<[100]> evalsupport module start')
+
+def deco_alpha(cls):
+ print('<[200]> deco_alpha')
+
+ def inner_1(self):
+ print('<[300]> deco_alpha:inner_1')
+
+ cls.method_y = inner_1
+ return cls
+
+
+class MetaAleph(type):
+ print('<[400]> MetaAleph body')
+
+ def __init__(cls, name, bases, dic):
+ print('<[500]> MetaAleph.__init__')
+
+ def inner_2(self):
+ print('<[600]> MetaAleph.__init__:inner_2')
+
+ cls.method_z = inner_2
+
+
+print('<[700]> evalsupport module end')
diff --git a/21-class-metaprog/evaltime.py b/21-class-metaprog/evaltime.py
new file mode 100644
index 0000000..299098a
--- /dev/null
+++ b/21-class-metaprog/evaltime.py
@@ -0,0 +1,49 @@
+from evalsupport import deco_alpha
+
+print('<[1]> evaltime module start')
+
+
+class ClassOne():
+ print('<[2]> ClassOne body')
+
+ def __init__(self):
+ print('<[3]> ClassOne.__init__')
+
+ def __del__(self):
+ print('<[4]> ClassOne.__del__')
+
+ def method_x(self):
+ print('<[5]> ClassOne.method_x')
+
+ class ClassTwo(object):
+ print('<[6]> ClassTwo body')
+
+
+@deco_alpha
+class ClassThree():
+ print('<[7]> ClassThree body')
+
+ def method_y(self):
+ print('<[8]> ClassThree.method_y')
+
+
+class ClassFour(ClassThree):
+ print('<[9]> ClassFour body')
+
+ def method_y(self):
+ print('<[10]> ClassFour.method_y')
+
+
+if __name__ == '__main__':
+ print('<[11]> ClassOne tests', 30 * '.')
+ one = ClassOne()
+ one.method_x()
+ print('<[12]> ClassThree tests', 30 * '.')
+ three = ClassThree()
+ three.method_y()
+ print('<[13]> ClassFour tests', 30 * '.')
+ four = ClassFour()
+ four.method_y()
+
+
+print('<[14]> evaltime module end')
diff --git a/21-class-metaprog/evaltime_meta.py b/21-class-metaprog/evaltime_meta.py
new file mode 100644
index 0000000..bc8f960
--- /dev/null
+++ b/21-class-metaprog/evaltime_meta.py
@@ -0,0 +1,53 @@
+from evalsupport import deco_alpha
+from evalsupport import MetaAleph
+
+print('<[1]> evaltime_meta module start')
+
+
+@deco_alpha
+class ClassThree():
+ print('<[2]> ClassThree body')
+
+ def method_y(self):
+ print('<[3]> ClassThree.method_y')
+
+
+class ClassFour(ClassThree):
+ print('<[4]> ClassFour body')
+
+ def method_y(self):
+ print('<[5]> ClassFour.method_y')
+
+
+class ClassFive(metaclass=MetaAleph):
+ print('<[6]> ClassFive body')
+
+ def __init__(self):
+ print('<[7]> ClassFive.__init__')
+
+ def method_z(self):
+        print('<[8]> ClassFive.method_z')
+
+
+class ClassSix(ClassFive):
+ print('<[9]> ClassSix body')
+
+ def method_z(self):
+        print('<[10]> ClassSix.method_z')
+
+
+if __name__ == '__main__':
+ print('<[11]> ClassThree tests', 30 * '.')
+ three = ClassThree()
+ three.method_y()
+ print('<[12]> ClassFour tests', 30 * '.')
+ four = ClassFour()
+ four.method_y()
+ print('<[13]> ClassFive tests', 30 * '.')
+ five = ClassFive()
+ five.method_z()
+ print('<[14]> ClassSix tests', 30 * '.')
+ six = ClassSix()
+ six.method_z()
+
+print('<[15]> evaltime_meta module end')
diff --git a/21-class-metaprog/factories.py b/21-class-metaprog/factories.py
new file mode 100644
index 0000000..bce83bf
--- /dev/null
+++ b/21-class-metaprog/factories.py
@@ -0,0 +1,59 @@
+"""
+record_factory: create simple classes just for holding data fields
+
+# BEGIN RECORD_FACTORY_DEMO
+ >>> Dog = record_factory('Dog', 'name weight owner') # <1>
+ >>> rex = Dog('Rex', 30, 'Bob')
+ >>> rex # <2>
+ Dog(name='Rex', weight=30, owner='Bob')
+ >>> name, weight, _ = rex # <3>
+ >>> name, weight
+ ('Rex', 30)
+ >>> "{2}'s dog weighs {1}kg".format(*rex) # <4>
+ "Bob's dog weighs 30kg"
+ >>> rex.weight = 32 # <5>
+ >>> rex
+ Dog(name='Rex', weight=32, owner='Bob')
+ >>> Dog.__mro__ # <6>
+    (<class 'factories.Dog'>, <class 'object'>)
+
+# END RECORD_FACTORY_DEMO
+
+The factory also accepts a list or tuple of identifiers:
+
+ >>> Dog = record_factory('Dog', ['name', 'weight', 'owner'])
+ >>> Dog.__slots__
+ ('name', 'weight', 'owner')
+
+"""
+
+# BEGIN RECORD_FACTORY
+def record_factory(cls_name, field_names):
+ try:
+ field_names = field_names.replace(',', ' ').split() # <1>
+ except AttributeError: # no .replace or .split
+ pass # assume it's already a sequence of identifiers
+ field_names = tuple(field_names) # <2>
+
+ def __init__(self, *args, **kwargs): # <3>
+ attrs = dict(zip(self.__slots__, args))
+ attrs.update(kwargs)
+ for name, value in attrs.items():
+ setattr(self, name, value)
+
+ def __iter__(self): # <4>
+ for name in self.__slots__:
+ yield getattr(self, name)
+
+ def __repr__(self): # <5>
+ values = ', '.join('{}={!r}'.format(*i) for i
+ in zip(self.__slots__, self))
+ return '{}({})'.format(self.__class__.__name__, values)
+
+ cls_attrs = dict(__slots__ = field_names, # <6>
+ __init__ = __init__,
+ __iter__ = __iter__,
+ __repr__ = __repr__)
+
+ return type(cls_name, (object,), cls_attrs) # <7>
+# END RECORD_FACTORY
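
The last line is the heart of ``record_factory``: calling ``type`` with three
arguments (name, bases, namespace) builds a class just as an equivalent
``class`` statement would. A short sketch, not from the book, showing the
equivalence with hypothetical names::

    def double(self):
        return self.value * 2

    # the three-argument type() call...
    Doubler = type('Doubler', (object,), {'value': 1, 'double': double})

    # ...builds the same class as this statement would:
    #
    #     class Doubler:
    #         value = 1
    #         def double(self):
    #             return self.value * 2

    d = Doubler()
    print(d.double())           # -> 2
    print(Doubler.__bases__)    # -> (<class 'object'>,)
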
diff --git a/README.rst b/README.rst
index 99a0ede..39b95c4 100644
--- a/README.rst
+++ b/README.rst
@@ -1,9 +1,9 @@
-Fluent Python: example code
-===========================
+Fluent Python, First Edition: example code
+==========================================
-Example code for the book `Fluent Python`_ by Luciano Ramalho (O'Reilly, 2014).
+**This repository is archived and will not be updated. Please visit https://github.com/fluentpython/example-code-2e**
- **BEWARE**: This is a work in progress, like the book itself.
+Example code for the book `Fluent Python, First Edition` by Luciano Ramalho (O'Reilly, 2015).
* Code here may change and disappear without warning.
diff --git a/attributes/exists_truthy.py b/attic/attributes/exists_truthy.py
similarity index 100%
rename from attributes/exists_truthy.py
rename to attic/attributes/exists_truthy.py
diff --git a/attributes/hasattr.py b/attic/attributes/hasattr.py
similarity index 100%
rename from attributes/hasattr.py
rename to attic/attributes/hasattr.py
diff --git a/classes/spherical-coordinates.txt b/attic/classes/spherical-coordinates.txt
similarity index 100%
rename from classes/spherical-coordinates.txt
rename to attic/classes/spherical-coordinates.txt
diff --git a/classes/sum-nth-element.rst b/attic/classes/sum-nth-element.rst
similarity index 100%
rename from classes/sum-nth-element.rst
rename to attic/classes/sum-nth-element.rst
diff --git a/classes/test_vector_spherical.py b/attic/classes/test_vector_spherical.py
similarity index 100%
rename from classes/test_vector_spherical.py
rename to attic/classes/test_vector_spherical.py
diff --git a/concurrency/charfinder/charfinder.html b/attic/concurrency/charfinder/charfinder.html
similarity index 100%
rename from concurrency/charfinder/charfinder.html
rename to attic/concurrency/charfinder/charfinder.html
diff --git a/concurrency/charfinder/charfinder.py b/attic/concurrency/charfinder/charfinder.py
similarity index 96%
rename from concurrency/charfinder/charfinder.py
rename to attic/concurrency/charfinder/charfinder.py
index c2a6e50..72e21a4 100755
--- a/concurrency/charfinder/charfinder.py
+++ b/attic/concurrency/charfinder/charfinder.py
@@ -63,9 +63,9 @@
import itertools
from collections import namedtuple
-RE_WORD = re.compile('\w+')
+RE_WORD = re.compile(r'\w+')
RE_UNICODE_NAME = re.compile('^[A-Z0-9 -]+$')
-RE_CODEPOINT = re.compile('U\+([0-9A-F]{4,6})')
+RE_CODEPOINT = re.compile(r'U\+([0-9A-F]{4,6})')
INDEX_NAME = 'charfinder_index.pickle'
MINIMUM_SAVE_LEN = 10000
@@ -165,7 +165,7 @@ def find_chars(self, query, start=0, stop=None):
for word in tokenize(query):
if word in self.index:
result_sets.append(self.index[word])
- else: # shorcut: no such word
+ else: # shortcut: no such word
result_sets = []
break
if result_sets:
@@ -186,7 +186,7 @@ def describe(self, char):
return CharDescription(code_str, char, name)
def find_descriptions(self, query, start=0, stop=None):
- for char in self.find_chars(query, start, stop):
+ for char in self.find_chars(query, start, stop).items:
yield self.describe(char)
def get_descriptions(self, chars):
@@ -197,7 +197,7 @@ def describe_str(self, char):
return '{:7}\t{}\t{}'.format(*self.describe(char))
def find_description_strs(self, query, start=0, stop=None):
- for char in self.find_chars(query, start, stop):
+ for char in self.find_chars(query, start, stop).items:
yield self.describe_str(char)
@staticmethod # not an instance method due to concurrency
diff --git a/concurrency/charfinder/http_charfinder.py b/attic/concurrency/charfinder/http_charfinder.py
similarity index 100%
rename from concurrency/charfinder/http_charfinder.py
rename to attic/concurrency/charfinder/http_charfinder.py
diff --git a/concurrency/charfinder/http_charfinder2.py b/attic/concurrency/charfinder/http_charfinder2.py
similarity index 100%
rename from concurrency/charfinder/http_charfinder2.py
rename to attic/concurrency/charfinder/http_charfinder2.py
diff --git a/concurrency/charfinder/tcp_charfinder.py b/attic/concurrency/charfinder/tcp_charfinder.py
similarity index 100%
rename from concurrency/charfinder/tcp_charfinder.py
rename to attic/concurrency/charfinder/tcp_charfinder.py
diff --git a/concurrency/charfinder/test_charfinder.py b/attic/concurrency/charfinder/test_charfinder.py
similarity index 100%
rename from concurrency/charfinder/test_charfinder.py
rename to attic/concurrency/charfinder/test_charfinder.py
diff --git a/attic/concurrency/flags/README.rst b/attic/concurrency/flags/README.rst
new file mode 100644
index 0000000..a956a62
--- /dev/null
+++ b/attic/concurrency/flags/README.rst
@@ -0,0 +1,33 @@
+=========================================
+Setting up the test environment
+=========================================
+
+Some of the concurrency examples in this book require a local HTTP
+server. These instructions show how I set up Nginx on GNU/Linux,
+Mac OS X 10.9 and Windows 7.
+
+Nginx setup on Mac OS X
+========================
+
+Homebrew (copy & paste code at the bottom of http://brew.sh/)::
+
+ $ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ $ brew doctor
+ $ brew install nginx
+
+Download and unpack::
+
+    Docroot is: /usr/local/var/www
+    /usr/local/etc/nginx/nginx.conf
+
+To have launchd start nginx at login:
+ ln -sfv /usr/local/opt/nginx/*.plist ~/Library/LaunchAgents
+Then to load nginx now:
+ launchctl load ~/Library/LaunchAgents/homebrew.mxcl.nginx.plist
+Or, if you don't want/need launchctl, you can just run:
+ nginx
+
+Nginx setup on Lubuntu 14.04.1 LTS
+==================================
+
+/usr/share/nginx/html
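
Once nginx is running and the flag fixture has been unpacked under the docroot,
a one-off request from Python is enough to confirm the server is reachable. The
host, port, and path below are assumptions, so adjust them to match your nginx
configuration and the fixture location::

    from urllib.request import urlopen

    # URL is an assumption: adjust host, port and path to your local nginx setup
    URL = 'http://localhost:8080/flags/br/br.gif'

    with urlopen(URL) as resp:
        data = resp.read()
        print(resp.status, len(data), 'bytes')
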
diff --git a/attic/concurrency/flags/add_continent.py b/attic/concurrency/flags/add_continent.py
new file mode 100644
index 0000000..de47456
--- /dev/null
+++ b/attic/concurrency/flags/add_continent.py
@@ -0,0 +1,27 @@
+# Source for continent listings:
+# United Nations Statistics Division
+# http://unstats.un.org/unsd/cr/ctryreg/default.asp?Lg=1
+
+CONTINENTS = dict(AF='Africa',
+ AS='Asia',
+ EU='Europe',
+ NA='North America',
+ SA='South America',
+ OC='Oceania')
+
+COUNTRY_CONTINENT = {}
+
+for cont_code, cont_name in CONTINENTS.items():
+ cont_suffix = cont_name.lower().replace(' ', '_')
+ with open('continent-' + cont_suffix + '.txt') as fp:
+ for country in fp:
+ COUNTRY_CONTINENT[country.strip()] = cont_code
+
+with open('country-codes.tab') as fp:
+ for lin in fp:
+ if lin.startswith('#'):
+ continue
+ lin = lin.strip()
+ cc, gec, name = lin.split('\t')
+ cont = COUNTRY_CONTINENT.get(name, '??')
+ print(cc, gec, cont, name, sep='\t')
diff --git a/attic/concurrency/flags/build_fixture.py b/attic/concurrency/flags/build_fixture.py
new file mode 100644
index 0000000..ea7a127
--- /dev/null
+++ b/attic/concurrency/flags/build_fixture.py
@@ -0,0 +1,30 @@
+"""
+Build flags fixture
+"""
+
+import shutil
+import os
+import json
+
+SRC = 'img/'
+DEST = 'fixture/'
+
+with open('country-codes.tab') as cc_fp:
+ for line in cc_fp:
+ if line.startswith('#'):
+ continue
+ iso_cc, gec_cc, name = line.strip().split('\t')
+ print(iso_cc, name)
+ cc = iso_cc.lower()
+ img_name = cc + '.gif'
+ from_file = os.path.join(SRC, img_name)
+ to_path = os.path.join(DEST, cc)
+ os.mkdir(to_path)
+ to_file = os.path.join(to_path, img_name)
+ shutil.copyfile(from_file, to_file)
+ tld_cc = 'uk' if cc == 'gb' else cc
+ metadata = {'country': name, 'iso_cc': iso_cc,
+ 'tld_cc': '.'+tld_cc, 'gec_cc': gec_cc}
+
+ with open(os.path.join(to_path, 'metadata.json'), 'wt') as json_fp:
+ json.dump(metadata, json_fp, ensure_ascii=True)
diff --git a/attic/concurrency/flags/build_fixture_with_continents.py b/attic/concurrency/flags/build_fixture_with_continents.py
new file mode 100644
index 0000000..3195571
--- /dev/null
+++ b/attic/concurrency/flags/build_fixture_with_continents.py
@@ -0,0 +1,36 @@
+"""
+Build flags fixture
+"""
+
+import shutil
+import os
+import json
+
+SRC = 'img/'
+DEST = 'fixture/'
+CONTINENTS = dict(AF='Africa',
+ AS='Asia',
+ EU='Europe',
+ NA='North America',
+ SA='South America',
+ OC='Oceania')
+
+with open('countries-continents.tab') as cc_fp:
+ for line in cc_fp:
+ if line.startswith('#'):
+ continue
+ iso_cc, gec_cc, cont, name = line.strip().split('\t')
+ print(iso_cc, name)
+ cc = iso_cc.lower()
+ img_name = cc + '.gif'
+ from_file = os.path.join(SRC, img_name)
+ to_path = os.path.join(DEST, cc)
+ os.mkdir(to_path)
+ to_file = os.path.join(to_path, img_name)
+ shutil.copyfile(from_file, to_file)
+ tld_cc = 'uk' if cc == 'gb' else cc
+ metadata = {'country': name, 'continent':CONTINENTS[cont],
+ 'iso_cc': iso_cc, 'tld_cc': '.'+tld_cc, 'gec_cc': gec_cc}
+
+ with open(os.path.join(to_path, 'metadata.json'), 'wt') as json_fp:
+ json.dump(metadata, json_fp, ensure_ascii=True)
diff --git a/attic/concurrency/flags/cc_count.py b/attic/concurrency/flags/cc_count.py
new file mode 100644
index 0000000..af1046e
--- /dev/null
+++ b/attic/concurrency/flags/cc_count.py
@@ -0,0 +1,19 @@
+
+from collections import Counter
+from operator import itemgetter
+from string import ascii_uppercase
+
+with open('country-codes.tab') as fp:
+ ct = Counter()
+ for line in fp:
+ if line.startswith('#'):
+ continue
+ cc, _, _ = line.split('\t')
+ ct[cc[0]] += 1
+ print(cc, end=' ')
+
+for key, value in sorted(ct.items(), key=itemgetter(1), reverse=True):
+ print(key, value)
+
+print('Total:', sum(ct.values()))
+print('Missing:', ', '.join(set(ascii_uppercase) - ct.keys()))
diff --git a/attic/concurrency/flags/cc_tlds.py b/attic/concurrency/flags/cc_tlds.py
new file mode 100644
index 0000000..2c96dab
--- /dev/null
+++ b/attic/concurrency/flags/cc_tlds.py
@@ -0,0 +1,36 @@
+"""
+Check country code TLDs
+"""
+
+import shutil
+import os
+import json
+
+iso_cc_db = {}
+
+with open('country-codes.tab') as cc_fp:
+ for line in cc_fp:
+ if line.startswith('#'):
+ continue
+ iso_cc, gec_cc, name = line.strip().split('\t')
+ iso_cc_db[iso_cc.lower()] = name
+
+tld_cc_db = {}
+
+with open('tlds.tab') as cc_fp:
+ for line in cc_fp:
+ if line.startswith('#'):
+ continue
+ tld_cc, category, entity = line.strip().split('\t')
+ if category.strip() != 'country-code':
+ continue
+ if ascii(tld_cc) != repr(tld_cc):
+ continue
+ tld_cc_db[tld_cc[1:].strip()] = entity
+
+not_tld = iso_cc_db.keys() - tld_cc_db.keys()
+print(sorted(not_tld))
+
+for iso_cc, name in sorted(iso_cc_db.items()):
+ entity = tld_cc_db[iso_cc]
+ print('{}\t{}\t{}'.format(iso_cc, name, entity))
diff --git a/attic/concurrency/flags/continents.zip b/attic/concurrency/flags/continents.zip
new file mode 100644
index 0000000..a2b30b2
Binary files /dev/null and b/attic/concurrency/flags/continents.zip differ
diff --git a/concurrency/flags/count_colors.py b/attic/concurrency/flags/count_colors.py
similarity index 100%
rename from concurrency/flags/count_colors.py
rename to attic/concurrency/flags/count_colors.py
diff --git a/attic/concurrency/flags/countries-continents.tab b/attic/concurrency/flags/countries-continents.tab
new file mode 100644
index 0000000..4778ea0
--- /dev/null
+++ b/attic/concurrency/flags/countries-continents.tab
@@ -0,0 +1,195 @@
+# ISO-3166-1 US-GEC continent name
+AF AF AS Afghanistan
+AL AL EU Albania
+DZ AG AF Algeria
+AD AN EU Andorra
+AO AO AF Angola
+AG AC NA Antigua and Barbuda
+AR AR SA Argentina
+AM AM AS Armenia
+AU AS OC Australia
+AT AU EU Austria
+AZ AJ AS Azerbaijan
+BS BF NA Bahamas
+BH BA AS Bahrain
+BD BG AS Bangladesh
+BB BB NA Barbados
+BY BO EU Belarus
+BE BE EU Belgium
+BZ BH NA Belize
+BJ BN AF Benin
+BT BT AS Bhutan
+BO BL SA Bolivia
+BA BK EU Bosnia and Herzegovina
+BW BC AF Botswana
+BR BR SA Brazil
+BN BX AS Brunei Darussalam
+BG BU EU Bulgaria
+BF UV AF Burkina Faso
+BI BY AF Burundi
+KH CB AS Cambodia
+CM CM AF Cameroon
+CA CA NA Canada
+CV CV AF Cape Verde
+CF CT AF Central African Republic
+TD CD AF Chad
+CL CI SA Chile
+CN CH AS China
+CO CO SA Colombia
+KM CN AF Comoros
+CG CF AF Congo (Brazzaville)
+CD CG AF Congo (Kinshasa)
+CR CS NA Costa Rica
+CI IV AF Côte d'Ivoire
+HR HR EU Croatia
+CU CU NA Cuba
+CY CY AS Cyprus
+CZ EZ EU Czech Republic
+DK DA EU Denmark
+DJ DJ AF Djibouti
+DM DO NA Dominica
+EC EC SA Ecuador
+EG EG AF Egypt
+SV ES NA El Salvador
+GQ EK AF Equatorial Guinea
+ER ER AF Eritrea
+EE EN EU Estonia
+ET ET AF Ethiopia
+FJ FJ OC Fiji
+FI FI EU Finland
+FR FR EU France
+GA GB AF Gabon
+GM GA AF Gambia
+GE GG AS Georgia
+DE GM EU Germany
+GH GH AF Ghana
+GR GR EU Greece
+GD GJ NA Grenada
+GT GT NA Guatemala
+GN GV AF Guinea
+GW PU AF Guinea-Bissau
+GY GY SA Guyana
+HT HA NA Haiti
+HN HO NA Honduras
+HU HU EU Hungary
+IS IC EU Iceland
+IN IN AS India
+ID ID AS Indonesia
+IR IR AS Iran
+IQ IZ AS Iraq
+IE EI EU Ireland
+IL IS AS Israel
+IT IT EU Italy
+JM JM NA Jamaica
+JP JA AS Japan
+JO JO AS Jordan
+KZ KZ AS Kazakhstan
+KE KE AF Kenya
+KI KR OC Kiribati
+KP KN AS Korea, North
+KR KS AS Korea, South
+KW KU AS Kuwait
+KG KG AS Kyrgyzstan
+LA LA AS Laos
+LV LG EU Latvia
+LB LE AS Lebanon
+LS LT AF Lesotho
+LR LI AF Liberia
+LY LY AF Libya
+LI LS EU Liechtenstein
+LT LH EU Lithuania
+LU LU EU Luxembourg
+MK MK EU Macedonia
+MG MA AF Madagascar
+MW MI AF Malawi
+MY MY AS Malaysia
+MV MV AF Maldives
+ML ML AF Mali
+MT MT EU Malta
+MH RM OC Marshall Islands
+MR MR AF Mauritania
+MU MP AF Mauritius
+MX MX NA Mexico
+FM FM OC Micronesia
+MD MD EU Moldova
+MC MN EU Monaco
+MN MG AS Mongolia
+ME MJ EU Montenegro
+MA MO AF Morocco
+MZ MZ AF Mozambique
+MM BM AS Myanmar
+NA WA AF Namibia
+NR NR OC Nauru
+NP NP AS Nepal
+NL NL EU Netherlands
+NZ NZ OC New Zealand
+NI NU NA Nicaragua
+NE NG AF Niger
+NG NI AF Nigeria
+NO NO EU Norway
+OM MU AS Oman
+PK PK AS Pakistan
+PW PS OC Palau
+PA PM NA Panama
+PG PP OC Papua New Guinea
+PY PA SA Paraguay
+PE PE SA Peru
+PH RP AS Philippines
+PL PL EU Poland
+PT PO EU Portugal
+QA QA AS Qatar
+RO RO EU Romania
+RU RS EU Russian Federation
+RW RW AF Rwanda
+KN SC NA Saint Kitts and Nevis
+LC ST NA Saint Lucia
+VC VC NA Grenadines
+WS WS OC Samoa
+SM SM EU San Marino
+ST TP AF Sao Tome and Principe
+SA SA AS Saudi Arabia
+SN SG AF Senegal
+RS RI EU Serbia
+SC SE AF Seychelles
+SL SL AF Sierra Leone
+SG SN AS Singapore
+SK LO EU Slovakia
+SI SI EU Slovenia
+SB BP OC Solomon Islands
+SO SO AF Somalia
+ZA SF AF South Africa
+SS OD AF South Sudan
+ES SP EU Spain
+LK CE AS Sri Lanka
+SD SU AF Sudan
+SR NS SA Suriname
+SZ WZ AF Swaziland
+SE SW EU Sweden
+CH SZ EU Switzerland
+SY SY AS Syria
+TW TW AS Taiwan
+TJ TI AS Tajikistan
+TZ TZ AF Tanzania
+TH TH AS Thailand
+TL TT OC Timor-Leste
+TG TO AF Togo
+TO TN OC Tonga
+TT TD NA Trinidad and Tobago
+TN TS AF Tunisia
+TR TU AS Turkey
+TM TX AS Turkmenistan
+TV TV OC Tuvalu
+UG UG AF Uganda
+UA UP EU Ukraine
+AE AE AS United Arab Emirates
+GB UK EU United Kingdom
+US US NA United States of America
+UY UY SA Uruguay
+UZ UZ AS Uzbekistan
+VU NH OC Vanuatu
+VA VT EU Vatican City
+VE VE SA Venezuela
+VN VM AS Vietnam
+YE YM AS Yemen
+ZM ZA AF Zambia
+ZW ZI AF Zimbabwe
diff --git a/concurrency/flags/country-codes.tab b/attic/concurrency/flags/country-codes.tab
similarity index 100%
rename from concurrency/flags/country-codes.tab
rename to attic/concurrency/flags/country-codes.tab
diff --git a/concurrency/flags/countryflags.py b/attic/concurrency/flags/countryflags.py
similarity index 100%
rename from concurrency/flags/countryflags.py
rename to attic/concurrency/flags/countryflags.py
diff --git a/attic/concurrency/flags/fixture.tar.gz b/attic/concurrency/flags/fixture.tar.gz
new file mode 100644
index 0000000..be32b40
Binary files /dev/null and b/attic/concurrency/flags/fixture.tar.gz differ
diff --git a/concurrency/flags/getsequential.py b/attic/concurrency/flags/getsequential.py
similarity index 100%
rename from concurrency/flags/getsequential.py
rename to attic/concurrency/flags/getsequential.py
diff --git a/concurrency/flags/getthreadpool.py b/attic/concurrency/flags/getthreadpool.py
similarity index 100%
rename from concurrency/flags/getthreadpool.py
rename to attic/concurrency/flags/getthreadpool.py
diff --git a/concurrency/flags/graphs.ods b/attic/concurrency/flags/graphs.ods
similarity index 100%
rename from concurrency/flags/graphs.ods
rename to attic/concurrency/flags/graphs.ods
diff --git a/concurrency/flags/img.zip b/attic/concurrency/flags/img.zip
similarity index 100%
rename from concurrency/flags/img.zip
rename to attic/concurrency/flags/img.zip
diff --git a/concurrency/flags/img/README.txt b/attic/concurrency/flags/img/README.txt
similarity index 100%
rename from concurrency/flags/img/README.txt
rename to attic/concurrency/flags/img/README.txt
diff --git a/attic/concurrency/flags/tlds.tab b/attic/concurrency/flags/tlds.tab
new file mode 100644
index 0000000..dcf1a70
--- /dev/null
+++ b/attic/concurrency/flags/tlds.tab
@@ -0,0 +1,848 @@
+# https://www.iana.org/domains/root/db
+.abogado generic Top Level Domain Holdings Limited
+.ac country-code Network Information Center (AC Domain Registry) c/o Cable and Wireless (Ascension Island)
+.academy generic Half Oaks, LLC
+.accountants generic Knob Town, LLC
+.active generic The Active Network, Inc
+.actor generic United TLD Holdco Ltd.
+.ad country-code Andorra Telecom
+.adult generic ICM Registry AD LLC
+.ae country-code Telecommunication Regulatory Authority (TRA)
+.aero sponsored Societe Internationale de Telecommunications Aeronautique (SITA INC USA)
+.af country-code Ministry of Communications and IT
+.ag country-code UHSA School of Medicine
+.agency generic Steel Falls, LLC
+.ai country-code Government of Anguilla
+.airforce generic United TLD Holdco Ltd.
+.al country-code Electronic and Postal Communications Authority - AKEP
+.allfinanz generic Allfinanz Deutsche Vermögensberatung Aktiengesellschaft
+.alsace generic REGION D ALSACE
+.am country-code Internet Society
+.amsterdam generic Gemeente Amsterdam
+.an country-code University of Curacao
+.android generic Charleston Road Registry Inc.
+.ao country-code Faculdade de Engenharia da Universidade Agostinho Neto
+.apartments generic June Maple, LLC
+.aq country-code Antarctica Network Information Centre Limited
+.aquarelle generic Aquarelle.com
+.ar country-code Presidencia de la Nación – Secretaría Legal y Técnica
+.archi generic STARTING DOT LIMITED
+.army generic United TLD Holdco Ltd.
+.arpa infrastructure Internet Architecture Board (IAB)
+.as country-code AS Domain Registry
+.asia sponsored DotAsia Organisation Ltd.
+.associates generic Baxter Hill, LLC
+.at country-code nic.at GmbH
+.attorney generic United TLD Holdco, Ltd
+.au country-code .au Domain Administration (auDA)
+.auction generic United TLD HoldCo, Ltd.
+.audio generic Uniregistry, Corp.
+.autos generic DERAutos, LLC
+.aw country-code SETAR
+.ax country-code Ålands landskapsregering
+.axa generic AXA SA
+.az country-code IntraNS
+.ba country-code Universtiy Telinformatic Centre (UTIC)
+.band generic United TLD Holdco, Ltd
+.bank generic fTLD Registry Services, LLC
+.bar generic Punto 2012 Sociedad Anonima Promotora de Inversion de Capital Variable
+.barclaycard generic Barclays Bank PLC
+.barclays generic Barclays Bank PLC
+.bargains generic Half Hallow, LLC
+.bayern generic Bayern Connect GmbH
+.bb country-code Government of Barbados Ministry of Economic Affairs and Development Telecommunications Unit
+.bd country-code Ministry of Post & Telecommunications Bangladesh Secretariat
+.be country-code DNS Belgium vzw/asbl
+.beer generic Top Level Domain Holdings Limited
+.berlin generic dotBERLIN GmbH & Co. KG
+.best generic BestTLD Pty Ltd
+.bf country-code ARCE-AutoritÈ de RÈgulation des Communications Electroniques
+.bg country-code Register.BG
+.bh country-code Telecommunications Regulatory Authority (TRA)
+.bi country-code Centre National de l'Informatique
+.bid generic dot Bid Limited
+.bike generic Grand Hollow, LLC
+.bingo generic Sand Cedar, LLC
+.bio generic STARTING DOT LIMITED
+.biz generic-restricted NeuStar, Inc.
+.bj country-code Benin Telecoms S.A.
+.bl country-code Not assigned
+.black generic Afilias Limited
+.blackfriday generic Uniregistry, Corp.
+.bloomberg generic Bloomberg IP Holdings LLC
+.blue generic Afilias Limited
+.bm country-code Registry General Ministry of Labour and Immigration
+.bmw generic Bayerische Motoren Werke Aktiengesellschaft
+.bn country-code Telekom Brunei Berhad
+.bnpparibas generic BNP Paribas
+.bo country-code Agencia para el Desarrollo de la Información de la Sociedad en Bolivia
+.boo generic Charleston Road Registry Inc.
+.boutique generic Over Galley, LLC
+.bq country-code Not assigned
+.br country-code Comite Gestor da Internet no Brasil
+.brussels generic DNS.be vzw
+.bs country-code The College of the Bahamas
+.bt country-code Ministry of Information and Communications
+.budapest generic Top Level Domain Holdings Limited
+.build generic Plan Bee LLC
+.builders generic Atomic Madison, LLC
+.business generic Spring Cross, LLC
+.buzz generic DOTSTRATEGY CO.
+.bv country-code UNINETT Norid A/S
+.bw country-code Botswana Communications Regulatory Authority (BOCRA)
+.by country-code Reliable Software Inc.
+.bz country-code University of Belize
+.bzh generic Association www.bzh
+.ca country-code Canadian Internet Registration Authority (CIRA) Autorite Canadienne pour les Enregistrements Internet (ACEI)
+.cab generic Half Sunset, LLC
+.cal generic Charleston Road Registry Inc.
+.camera generic Atomic Maple, LLC
+.camp generic Delta Dynamite, LLC
+.cancerresearch generic Australian Cancer Research Foundation
+.canon generic Canon Inc.
+.capetown generic ZA Central Registry NPC trading as ZA Central Registry
+.capital generic Delta Mill, LLC
+.caravan generic Caravan International, Inc.
+.cards generic Foggy Hollow, LLC
+.care generic Goose Cross, LLC
+.career generic dotCareer LLC
+.careers generic Wild Corner, LLC
+.cartier generic Richemont DNS Inc.
+.casa generic Top Level Domain Holdings Limited
+.cash generic Delta Lake, LLC
+.cat sponsored Fundacio puntCAT
+.catering generic New Falls. LLC
+.cbn generic The Christian Broadcasting Network, Inc.
+.cc country-code eNIC Cocos (Keeling) Islands Pty. Ltd. d/b/a Island Internet Services
+.cd country-code Office Congolais des Postes et Télécommunications - OCPT
+.center generic Tin Mill, LLC
+.ceo generic CEOTLD Pty Ltd
+.cern generic European Organization for Nuclear Research ("CERN")
+.cf country-code Societe Centrafricaine de Telecommunications (SOCATEL)
+.cg country-code ONPT Congo and Interpoint Switzerland
+.ch country-code SWITCH The Swiss Education & Research Network
+.channel generic Charleston Road Registry Inc.
+.chat generic Sand Fields, LLC
+.cheap generic Sand Cover, LLC
+.christmas generic Uniregistry, Corp.
+.chrome generic Charleston Road Registry Inc.
+.church generic Holly Fileds, LLC
+.ci country-code INP-HB Institut National Polytechnique Felix Houphouet Boigny
+.citic generic CITIC Group Corporation
+.city generic Snow Sky, LLC
+.ck country-code Telecom Cook Islands Ltd.
+.cl country-code NIC Chile (University of Chile)
+.claims generic Black Corner, LLC
+.cleaning generic Fox Shadow, LLC
+.click generic Uniregistry, Corp.
+.clinic generic Goose Park, LLC
+.clothing generic Steel Lake, LLC
+.club generic .CLUB DOMAINS, LLC
+.cm country-code Cameroon Telecommunications (CAMTEL)
+.cn country-code Computer Network Information Center, Chinese Academy of Sciences
+.co country-code .CO Internet S.A.S.
+.coach generic Koko Island, LLC
+.codes generic Puff Willow, LLC
+.coffee generic Trixy Cover, LLC
+.college generic XYZ.COM LLC
+.cologne generic NetCologne Gesellschaft für Telekommunikation mbH
+.com generic VeriSign Global Registry Services
+.community generic Fox Orchard, LLC
+.company generic Silver Avenue, LLC
+.computer generic Pine Mill, LLC
+.condos generic Pine House, LLC
+.construction generic Fox Dynamite, LLC
+.consulting generic United TLD Holdco, LTD.
+.contractors generic Magic Woods, LLC
+.cooking generic Top Level Domain Holdings Limited
+.cool generic Koko Lake, LLC
+.coop sponsored DotCooperation LLC
+.country generic Top Level Domain Holdings Limited
+.cr country-code National Academy of Sciences Academia Nacional de Ciencias
+.credit generic Snow Shadow, LLC
+.creditcard generic Binky Frostbite, LLC
+.cricket generic dot Cricket Limited
+.crs generic Federated Co-operatives Limited
+.cruises generic Spring Way, LLC
+.cu country-code CENIAInternet Industria y San Jose Capitolio Nacional
+.cuisinella generic SALM S.A.S.
+.cv country-code Agência Nacional das Comunicações (ANAC)
+.cw country-code University of Curacao
+.cx country-code Christmas Island Internet Administration Limited
+.cy country-code University of Cyprus
+.cymru generic Nominet UK
+.cz country-code CZ.NIC, z.s.p.o
+.dabur generic Dabur India Limited
+.dad generic Charleston Road Registry Inc.
+.dance generic United TLD Holdco Ltd.
+.dating generic Pine Fest, LLC
+.day generic Charleston Road Registry Inc.
+.dclk generic Charleston Road Registry Inc.
+.de country-code DENIC eG
+.deals generic Sand Sunset, LLC
+.degree generic United TLD Holdco, Ltd
+.delivery generic Steel Station, LLC
+.democrat generic United TLD Holdco Ltd.
+.dental generic Tin Birch, LLC
+.dentist generic United TLD Holdco, Ltd
+.desi generic Desi Networks LLC
+.design generic Top Level Design, LLC
+.dev generic Charleston Road Registry Inc.
+.diamonds generic John Edge, LLC
+.diet generic Uniregistry, Corp.
+.digital generic Dash Park, LLC
+.direct generic Half Trail, LLC
+.directory generic Extra Madison, LLC
+.discount generic Holly Hill, LLC
+.dj country-code Djibouti Telecom S.A
+.dk country-code Dansk Internet Forum
+.dm country-code DotDM Corporation
+.dnp generic Dai Nippon Printing Co., Ltd.
+.do country-code Pontificia Universidad Catolica Madre y Maestra Recinto Santo Tomas de Aquino
+.docs generic Charleston Road Registry Inc.
+.domains generic Sugar Cross, LLC
+.doosan generic Doosan Corporation
+.durban generic ZA Central Registry NPC trading as ZA Central Registry
+.dvag generic Deutsche Vermögensberatung Aktiengesellschaft DVAG
+.dz country-code CERIST
+.eat generic Charleston Road Registry Inc.
+.ec country-code NIC.EC (NICEC) S.A.
+.edu sponsored EDUCAUSE
+.education generic Brice Way, LLC
+.ee country-code Eesti Interneti Sihtasutus (EIS)
+.eg country-code Egyptian Universities Network (EUN) Supreme Council of Universities
+.eh country-code Not assigned
+.email generic Spring Madison, LLC
+.emerck generic Merck KGaA
+.energy generic Binky Birch, LLC
+.engineer generic United TLD Holdco Ltd.
+.engineering generic Romeo Canyon
+.enterprises generic Snow Oaks, LLC
+.equipment generic Corn Station, LLC
+.er country-code Eritrea Telecommunication Services Corporation (EriTel)
+.es country-code Red.es
+.esq generic Charleston Road Registry Inc.
+.estate generic Trixy Park, LLC
+.et country-code Ethio telecom
+.eu country-code EURid vzw/asbl
+.eurovision generic European Broadcasting Union (EBU)
+.eus generic Puntueus Fundazioa
+.events generic Pioneer Maple, LLC
+.everbank generic EverBank
+.exchange generic Spring Falls, LLC
+.expert generic Magic Pass, LLC
+.exposed generic Victor Beach, LLC
+.fail generic Atomic Pipe, LLC
+.fans generic Asiamix Digital Limited
+.farm generic Just Maple, LLC
+.fashion generic Top Level Domain Holdings Limited
+.feedback generic Top Level Spectrum, Inc.
+.fi country-code Finnish Communications Regulatory Authority
+.finance generic Cotton Cypress, LLC
+.financial generic Just Cover, LLC
+.firmdale generic Firmdale Holdings Limited
+.fish generic Fox Woods, LLC
+.fishing generic Top Level Domain Holdings Limited
+.fit generic Minds + Machines Group Limited
+.fitness generic Brice Orchard, LLC
+.fj country-code The University of the South Pacific IT Services
+.fk country-code Falkland Islands Government
+.flights generic Fox Station, LLC
+.florist generic Half Cypress, LLC
+.flowers generic Uniregistry, Corp.
+.flsmidth generic FLSmidth A/S
+.fly generic Charleston Road Registry Inc.
+.fm country-code FSM Telecommunications Corporation
+.fo country-code FO Council
+.foo generic Charleston Road Registry Inc.
+.forsale generic United TLD Holdco, LLC
+.foundation generic John Dale, LLC
+.fr country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.frl generic FRLregistry B.V.
+.frogans generic OP3FT
+.fund generic John Castle, LLC
+.furniture generic Lone Fields, LLC
+.futbol generic United TLD Holdco, Ltd.
+.ga country-code Agence Nationale des Infrastructures Numériques et des Fréquences (ANINF)
+.gal generic Asociación puntoGAL
+.gallery generic Sugar House, LLC
+.garden generic Top Level Domain Holdings Limited
+.gb country-code Reserved Domain - IANA
+.gbiz generic Charleston Road Registry Inc.
+.gd country-code The National Telecommunications Regulatory Commission (NTRC)
+.gdn generic Joint Stock Company "Navigation-information systems"
+.ge country-code Caucasus Online
+.gent generic COMBELL GROUP NV/SA
+.gf country-code Net Plus
+.gg country-code Island Networks Ltd.
+.ggee generic GMO Internet, Inc.
+.gh country-code Network Computer Systems Limited
+.gi country-code Sapphire Networks
+.gift generic Uniregistry, Corp.
+.gifts generic Goose Sky, LLC
+.gives generic United TLD Holdco Ltd.
+.gl country-code TELE Greenland A/S
+.glass generic Black Cover, LLC
+.gle generic Charleston Road Registry Inc.
+.global generic Dot Global Domain Registry Limited
+.globo generic Globo Comunicação e Participações S.A
+.gm country-code GM-NIC
+.gmail generic Charleston Road Registry Inc.
+.gmo generic GMO Internet, Inc.
+.gmx generic 1&1 Mail & Media GmbH
+.gn country-code Centre National des Sciences Halieutiques de Boussoura
+.goldpoint generic YODOBASHI CAMERA CO.,LTD.
+.goog generic Charleston Road Registry Inc.
+.google generic Charleston Road Registry Inc.
+.gop generic Republican State Leadership Committee, Inc.
+.gov sponsored General Services Administration Attn: QTDC, 2E08 (.gov Domain Registration)
+.gp country-code Networking Technologies Group
+.gq country-code GETESA
+.gr country-code ICS-FORTH GR
+.graphics generic Over Madison, LLC
+.gratis generic Pioneer Tigers, LLC
+.green generic Afilias Limited
+.gripe generic Corn Sunset, LLC
+.gs country-code Government of South Georgia and South Sandwich Islands (GSGSSI)
+.gt country-code Universidad del Valle de Guatemala
+.gu country-code University of Guam Computer Center
+.guide generic Snow Moon, LLC
+.guitars generic Uniregistry, Corp.
+.guru generic Pioneer Cypress, LLC
+.gw country-code Autoridade Reguladora Nacional - Tecnologias de Informação e Comunicação da Guiné-Bissau
+.gy country-code University of Guyana
+.hamburg generic Hamburg Top-Level-Domain GmbH
+.hangout generic Charleston Road Registry Inc.
+.haus generic United TLD Holdco, LTD.
+.healthcare generic Silver Glen, LLC
+.help generic Uniregistry, Corp.
+.here generic Charleston Road Registry Inc.
+.hermes generic Hermes International
+.hiphop generic Uniregistry, Corp.
+.hiv generic dotHIV gemeinnuetziger e.V.
+.hk country-code Hong Kong Internet Registration Corporation Ltd.
+.hm country-code HM Domain Registry
+.hn country-code Red de Desarrollo Sostenible Honduras
+.holdings generic John Madison, LLC
+.holiday generic Goose Woods, LLC
+.homes generic DERHomes, LLC
+.horse generic Top Level Domain Holdings Limited
+.host generic DotHost Inc.
+.hosting generic Uniregistry, Corp.
+.house generic Sugar Park, LLC
+.how generic Charleston Road Registry Inc.
+.hr country-code CARNet - Croatian Academic and Research Network
+.ht country-code Consortium FDS/RDDH
+.hu country-code Council of Hungarian Internet Providers (CHIP)
+.ibm generic International Business Machines Corporation
+.id country-code Perkumpulan Pengelola Nama Domain Internet Indonesia (PANDI)
+.ie country-code University College Dublin Computing Services Computer Centre
+.ifm generic ifm electronic gmbh
+.il country-code Internet Society of Israel
+.im country-code Isle of Man Government
+.immo generic Auburn Bloom, LLC
+.immobilien generic United TLD Holdco Ltd.
+.in country-code National Internet Exchange of India
+.industries generic Outer House, LLC
+.info generic Afilias Limited
+.ing generic Charleston Road Registry Inc.
+.ink generic Top Level Design, LLC
+.institute generic Outer Maple, LLC
+.insure generic Pioneer Willow, LLC
+.int sponsored Internet Assigned Numbers Authority
+.international generic Wild Way, LLC
+.investments generic Holly Glen, LLC
+.io country-code IO Top Level Domain Registry Cable and Wireless
+.iq country-code Communications and Media Commission (CMC)
+.ir country-code Institute for Research in Fundamental Sciences
+.irish generic Dot-Irish LLC
+.is country-code ISNIC - Internet Iceland ltd.
+.it country-code IIT - CNR
+.iwc generic Richemont DNS Inc.
+.jcb generic JCB Co., Ltd.
+.je country-code Island Networks (Jersey) Ltd.
+.jetzt generic New TLD Company AB
+.jm country-code University of West Indies
+.jo country-code National Information Technology Center (NITC)
+.jobs sponsored Employ Media LLC
+.joburg generic ZA Central Registry NPC trading as ZA Central Registry
+.jp country-code Japan Registry Services Co., Ltd.
+.juegos generic Uniregistry, Corp.
+.kaufen generic United TLD Holdco Ltd.
+.kddi generic KDDI CORPORATION
+.ke country-code Kenya Network Information Center (KeNIC)
+.kg country-code AsiaInfo Telecommunication Enterprise
+.kh country-code Ministry of Post and Telecommunications
+.ki country-code Ministry of Communications, Transport, and Tourism Development
+.kim generic Afilias Limited
+.kitchen generic Just Goodbye, LLC
+.kiwi generic DOT KIWI LIMITED
+.km country-code Comores Telecom
+.kn country-code Ministry of Finance, Sustainable Development Information & Technology
+.koeln generic NetCologne Gesellschaft für Telekommunikation mbH
+.kp country-code Star Joint Venture Company
+.kr country-code Korea Internet & Security Agency (KISA)
+.krd generic KRG Department of Information Technology
+.kred generic KredTLD Pty Ltd
+.kw country-code Ministry of Communications
+.ky country-code The Information and Communications Technology Authority
+.kyoto generic Academic Institution: Kyoto Jyoho Gakuen
+.kz country-code Association of IT Companies of Kazakhstan
+.la country-code Lao National Internet Committee (LANIC), Ministry of Posts and Telecommunications
+.lacaixa generic CAIXA D'ESTALVIS I PENSIONS DE BARCELONA
+.land generic Pine Moon, LLC
+.lat generic ECOM-LAC Federación de Latinoamérica y el Caribe para Internet y el Comercio Electrónico
+.latrobe generic La Trobe University
+.lawyer generic United TLD Holdco, Ltd
+.lb country-code American University of Beirut Computing and Networking Services
+.lc country-code University of Puerto Rico
+.lds generic IRI Domain Management, LLC
+.lease generic Victor Trail, LLC
+.legal generic Blue Falls, LLC
+.lgbt generic Afilias Limited
+.li country-code Universitaet Liechtenstein
+.lidl generic Schwarz Domains und Services GmbH & Co. KG
+.life generic Trixy Oaks, LLC
+.lighting generic John McCook, LLC
+.limited generic Big Fest, LLC
+.limo generic Hidden Frostbite, LLC
+.link generic Uniregistry, Corp.
+.lk country-code Council for Information Technology LK Domain Registrar
+.loans generic June Woods, LLC
+.london generic Dot London Domains Limited
+.lotte generic Lotte Holdings Co., Ltd.
+.lotto generic Afilias Limited
+.lr country-code Data Technology Solutions, Inc.
+.ls country-code National University of Lesotho
+.lt country-code Kaunas University of Technology
+.ltda generic InterNetX Corp.
+.lu country-code RESTENA
+.luxe generic Top Level Domain Holdings Limited
+.luxury generic Luxury Partners LLC
+.lv country-code University of Latvia Institute of Mathematics and Computer Science Department of Network Solutions (DNS)
+.ly country-code General Post and Telecommunication Company
+.ma country-code Agence Nationale de Réglementation des Télécommunications (ANRT)
+.madrid generic Comunidad de Madrid
+.maison generic Victor Frostbite, LLC
+.management generic John Goodbye, LLC
+.mango generic PUNTO FA S.L.
+.market generic Unitied TLD Holdco, Ltd
+.marketing generic Fern Pass, LLC
+.marriott generic Marriott Worldwide Corporation
+.mc country-code Gouvernement de Monaco Direction des Communications Electroniques
+.md country-code MoldData S.E.
+.me country-code Government of Montenegro
+.media generic Grand Glen, LLC
+.meet generic Afilias Limited
+.melbourne generic The Crown in right of the State of Victoria, represented by its Department of State Development, Business and Innovation
+.meme generic Charleston Road Registry Inc.
+.memorial generic Dog Beach, LLC
+.menu generic Wedding TLD2, LLC
+.mf country-code Not assigned
+.mg country-code NIC-MG (Network Information Center Madagascar)
+.mh country-code Office of the Cabinet
+.miami generic Top Level Domain Holdings Limited
+.mil sponsored DoD Network Information Center
+.mini generic Bayerische Motoren Werke Aktiengesellschaft
+.mk country-code Macedonian Academic Research Network Skopje
+.ml country-code Agence des Technologies de l’Information et de la Communication
+.mm country-code Ministry of Communications, Posts & Telegraphs
+.mn country-code Datacom Co., Ltd.
+.mo country-code Bureau of Telecommunications Regulation (DSRT)
+.mobi sponsored Afilias Technologies Limited dba dotMobi
+.moda generic United TLD Holdco Ltd.
+.moe generic Interlink Co., Ltd.
+.monash generic Monash University
+.money generic Outer McCook, LLC
+.mormon generic IRI Domain Management, LLC ("Applicant")
+.mortgage generic United TLD Holdco, Ltd
+.moscow generic Foundation for Assistance for Internet Technologies and Infrastructure Development (FAITID)
+.motorcycles generic DERMotorcycles, LLC
+.mov generic Charleston Road Registry Inc.
+.mp country-code Saipan Datacom, Inc.
+.mq country-code MEDIASERV
+.mr country-code Université des Sciences, de Technologie et de Médecine
+.ms country-code MNI Networks Ltd.
+.mt country-code NIC (Malta)
+.mu country-code Internet Direct Ltd
+.museum sponsored Museum Domain Management Association
+.mv country-code Dhiraagu Pvt. Ltd. (DHIVEHINET)
+.mw country-code Malawi Sustainable Development Network Programme (Malawi SDNP)
+.mx country-code NIC-Mexico ITESM - Campus Monterrey
+.my country-code MYNIC Berhad
+.mz country-code Centro de Informatica de Universidade Eduardo Mondlane
+.na country-code Namibian Network Information Center
+.nagoya generic GMO Registry, Inc.
+.name generic-restricted VeriSign Information Services, Inc.
+.navy generic United TLD Holdco Ltd.
+.nc country-code Office des Postes et Telecommunications
+.ne country-code SONITEL
+.net generic VeriSign Global Registry Services
+.network generic Trixy Manor, LLC
+.neustar generic NeuStar, Inc.
+.new generic Charleston Road Registry Inc.
+.nexus generic Charleston Road Registry Inc.
+.nf country-code Norfolk Island Data Services
+.ng country-code Nigeria Internet Registration Association
+.ngo generic Public Interest Registry
+.nhk generic Japan Broadcasting Corporation (NHK)
+.ni country-code Universidad Nacional del Ingernieria Centro de Computo
+.nico generic DWANGO Co., Ltd.
+.ninja generic United TLD Holdco Ltd.
+.nl country-code SIDN (Stichting Internet Domeinregistratie Nederland)
+.no country-code UNINETT Norid A/S
+.np country-code Mercantile Communications Pvt. Ltd.
+.nr country-code CENPAC NET
+.nra generic NRA Holdings Company, INC.
+.nrw generic Minds + Machines GmbH
+.ntt generic NIPPON TELEGRAPH AND TELEPHONE CORPORATION
+.nu country-code The IUSN Foundation
+.nyc generic The City of New York by and through the New York City Department of Information Technology & Telecommunications
+.nz country-code InternetNZ
+.okinawa generic BusinessRalliart inc.
+.om country-code Telecommunications Regulatory Authority (TRA)
+.one generic One.com A/S
+.ong generic Public Interest Registry
+.onl generic I-REGISTRY Ltd., Niederlassung Deutschland
+.ooo generic INFIBEAM INCORPORATION LIMITED
+.org generic Public Interest Registry (PIR)
+.organic generic Afilias Limited
+.osaka generic Interlink Co., Ltd.
+.otsuka generic Otsuka Holdings Co., Ltd.
+.ovh generic OVH SAS
+.pa country-code Universidad Tecnologica de Panama
+.paris generic City of Paris
+.partners generic Magic Glen, LLC
+.parts generic Sea Goodbye, LLC
+.party generic Blue Sky Registry Limited
+.pe country-code Red Cientifica Peruana
+.pf country-code Gouvernement de la Polynésie française
+.pg country-code PNG DNS Administration Vice Chancellors Office The Papua New Guinea University of Technology
+.ph country-code PH Domain Foundation
+.pharmacy generic National Association of Boards of Pharmacy
+.photo generic Uniregistry, Corp.
+.photography generic Sugar Glen, LLC
+.photos generic Sea Corner, LLC
+.physio generic PhysBiz Pty Ltd
+.pics generic Uniregistry, Corp.
+.pictures generic Foggy Sky, LLC
+.pink generic Afilias Limited
+.pizza generic Foggy Moon, LLC
+.pk country-code PKNIC
+.pl country-code Research and Academic Computer Network
+.place generic Snow Galley, LLC
+.plumbing generic Spring Tigers, LLC
+.pm country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.pn country-code Pitcairn Island Administration
+.pohl generic Deutsche Vermögensberatung Aktiengesellschaft DVAG
+.poker generic Afilias Domains No. 5 Limited
+.porn generic ICM Registry PN LLC
+.post sponsored Universal Postal Union
+.pr country-code Gauss Research Laboratory Inc.
+.praxi generic Praxi S.p.A.
+.press generic DotPress Inc.
+.pro generic-restricted Registry Services Corporation dba RegistryPro
+.prod generic Charleston Road Registry Inc.
+.productions generic Magic Birch, LLC
+.prof generic Charleston Road Registry Inc.
+.properties generic Big Pass, LLC
+.property generic Uniregistry, Corp.
+.ps country-code Ministry Of Telecommunications & Information Technology, Government Computer Center.
+.pt country-code Associação DNS.PT
+.pub generic United TLD Holdco Ltd.
+.pw country-code Micronesia Investment and Development Corporation
+.py country-code NIC-PY
+.qa country-code Communications Regulatory Authority
+.qpon generic dotCOOL, Inc.
+.quebec generic PointQuébec Inc
+.re country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.realtor generic Real Estate Domains LLC
+.recipes generic Grand Island, LLC
+.red generic Afilias Limited
+.rehab generic United TLD Holdco Ltd.
+.reise generic dotreise GmbH
+.reisen generic New Cypress, LLC
+.reit generic National Association of Real Estate Investment Trusts, Inc.
+.ren generic Beijing Qianxiang Wangjing Technology Development Co., Ltd.
+.rentals generic Big Hollow,LLC
+.repair generic Lone Sunset, LLC
+.report generic Binky Glen, LLC
+.republican generic United TLD Holdco Ltd.
+.rest generic Punto 2012 Sociedad Anonima Promotora de Inversion de Capital Variable
+.restaurant generic Snow Avenue, LLC
+.reviews generic United TLD Holdco, Ltd.
+.rich generic I-REGISTRY Ltd., Niederlassung Deutschland
+.rio generic Empresa Municipal de Informática SA - IPLANRIO
+.rip generic United TLD Holdco Ltd.
+.ro country-code National Institute for R&D in Informatics
+.rocks generic United TLD Holdco, LTD.
+.rodeo generic Top Level Domain Holdings Limited
+.rs country-code Serbian National Internet Domain Registry (RNIDS)
+.rsvp generic Charleston Road Registry Inc.
+.ru country-code Coordination Center for TLD RU
+.ruhr generic regiodot GmbH & Co. KG
+.rw country-code Rwanda Information Communication and Technology Association (RICTA)
+.ryukyu generic BusinessRalliart inc.
+.sa country-code Communications and Information Technology Commission
+.saarland generic dotSaarland GmbH
+.sale generic United TLD Holdco, Ltd
+.samsung generic SAMSUNG SDS CO., LTD
+.sarl generic Delta Orchard, LLC
+.saxo generic Saxo Bank A/S
+.sb country-code Solomon Telekom Company Limited
+.sc country-code VCS Pty Ltd
+.sca generic SVENSKA CELLULOSA AKTIEBOLAGET SCA (publ)
+.scb generic The Siam Commercial Bank Public Company Limited ("SCB")
+.schmidt generic SALM S.A.S.
+.schule generic Outer Moon, LLC
+.schwarz generic Schwarz Domains und Services GmbH & Co. KG
+.science generic dot Science Limited
+.scot generic Dot Scot Registry Limited
+.sd country-code Sudan Internet Society
+.se country-code The Internet Infrastructure Foundation
+.services generic Fox Castle, LLC
+.sew generic SEW-EURODRIVE GmbH & Co KG
+.sexy generic Uniregistry, Corp.
+.sg country-code Singapore Network Information Centre (SGNIC) Pte Ltd
+.sh country-code Government of St. Helena
+.shiksha generic Afilias Limited
+.shoes generic Binky Galley, LLC
+.shriram generic Shriram Capital Ltd.
+.si country-code Academic and Research Network of Slovenia (ARNES)
+.singles generic Fern Madison, LLC
+.sj country-code UNINETT Norid A/S
+.sk country-code SK-NIC, a.s.
+.sky generic Sky IP International Ltd, a company incorporated in England and Wales, operating via its registered Swiss branch
+.sl country-code Sierratel
+.sm country-code Telecom Italia San Marino S.p.A.
+.sn country-code Universite Cheikh Anta Diop NIC Senegal
+.so country-code Ministry of Post and Telecommunications
+.social generic United TLD Holdco Ltd.
+.software generic United TLD Holdco, Ltd
+.sohu generic Sohu.com Limited
+.solar generic Ruby Town, LLC
+.solutions generic Silver Cover, LLC
+.soy generic Charleston Road Registry Inc.
+.space generic DotSpace Inc.
+.spiegel generic SPIEGEL-Verlag Rudolf Augstein GmbH & Co. KG
+.sr country-code Telesur
+.ss country-code Not assigned
+.st country-code Tecnisys
+.style generic Binky Moon, LLC
+.su country-code Russian Institute for Development of Public Networks (ROSNIIROS)
+.supplies generic Atomic Fields, LLC
+.supply generic Half Falls, LLC
+.support generic Grand Orchard, LLC
+.surf generic Top Level Domain Holdings Limited
+.surgery generic Tin Avenue, LLC
+.suzuki generic SUZUKI MOTOR CORPORATION
+.sv country-code SVNet
+.sx country-code SX Registry SA B.V.
+.sy country-code National Agency for Network Services (NANS)
+.sydney generic State of New South Wales, Department of Premier and Cabinet
+.systems generic Dash Cypress, LLC
+.sz country-code University of Swaziland Department of Computer Science
+.taipei generic Taipei City Government
+.tatar generic Limited Liability Company "Coordination Center of Regional Domain of Tatarstan Republic"
+.tattoo generic Uniregistry, Corp.
+.tax generic Storm Orchard, LLC
+.tc country-code Melrex TC
+.td country-code Société des télécommunications du Tchad (SOTEL TCHAD)
+.technology generic Auburn Falls, LLC
+.tel sponsored Telnic Ltd.
+.temasek generic Temasek Holdings (Private) Limited
+.tennis generic Cotton Bloom, LLC
+.tf country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.tg country-code Cafe Informatique et Telecommunications
+.th country-code Thai Network Information Center Foundation
+.tienda generic Victor Manor, LLC
+.tips generic Corn Willow, LLC
+.tires generic Dog Edge, LLC
+.tirol generic punkt Tirol GmbH
+.tj country-code Information Technology Center
+.tk country-code Telecommunication Tokelau Corporation (Teletok)
+.tl country-code Ministry of Transport and Communications; National Division of Information and Technology
+.tm country-code TM Domain Registry Ltd
+.tn country-code Agence Tunisienne d'Internet
+.to country-code Government of the Kingdom of Tonga H.R.H. Crown Prince Tupouto'a c/o Consulate of Tonga
+.today generic Pearl Woods, LLC
+.tokyo generic GMO Registry, Inc.
+.tools generic Pioneer North, LLC
+.top generic Jiangsu Bangning Science & Technology Co.,Ltd.
+.toshiba generic TOSHIBA Corporation
+.town generic Koko Moon, LLC
+.toys generic Pioneer Orchard, LLC
+.tp country-code -
+.tr country-code Middle East Technical University Department of Computer Engineering
+.trade generic Elite Registry Limited
+.training generic Wild Willow, LLC
+.travel sponsored Tralliance Registry Management Company, LLC.
+.trust generic Artemis Internet Inc
+.tt country-code University of the West Indies Faculty of Engineering
+.tui generic TUI AG
+.tv country-code Ministry of Finance and Tourism
+.tw country-code Taiwan Network Information Center (TWNIC)
+.tz country-code Tanzania Network Information Centre (tzNIC)
+.ua country-code Hostmaster Ltd.
+.ug country-code Uganda Online Ltd.
+.uk country-code Nominet UK
+.um country-code Not assigned
+.university generic Little Station, LLC
+.uno generic Dot Latin LLC
+.uol generic UBN INTERNET LTDA.
+.us country-code NeuStar, Inc.
+.uy country-code SeCIU - Universidad de la Republica
+.uz country-code Computerization and Information Technologies Developing Center UZINFOCOM
+.va country-code Holy See Secretariat of State Department of Telecommunications
+.vacations generic Atomic Tigers, LLC
+.vc country-code Ministry of Telecommunications, Science, Technology and Industry
+.ve country-code Comisión Nacional de Telecomunicaciones (CONATEL)
+.vegas generic Dot Vegas, Inc.
+.ventures generic Binky Lake, LLC
+.versicherung generic dotversicherung-registry GmbH
+.vet generic United TLD Holdco, Ltd
+.vg country-code Telecommunications Regulatory Commission of the Virgin Islands
+.vi country-code Virgin Islands Public Telcommunications System c/o COBEX Internet Services
+.viajes generic Black Madison, LLC
+.video generic United TLD Holdco, Ltd
+.villas generic New Sky, LLC
+.vision generic Koko Station, LLC
+.vlaanderen generic DNS.be vzw
+.vn country-code Ministry of Information and Communications of Socialist Republic of Viet Nam
+.vodka generic Top Level Domain Holdings Limited
+.vote generic Monolith Registry LLC
+.voting generic Valuetainment Corp.
+.voto generic Monolith Registry LLC
+.voyage generic Ruby House, LLC
+.vu country-code Telecom Vanuatu Limited
+.wales generic Nominet UK
+.wang generic Zodiac Registry Limited
+.watch generic Sand Shadow, LLC
+.webcam generic dot Webcam Limited
+.website generic DotWebsite Inc.
+.wed generic Atgron, Inc.
+.wedding generic Top Level Domain Holdings Limited
+.wf country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.whoswho generic Who's Who Registry
+.wien generic punkt.wien GmbH
+.wiki generic Top Level Design, LLC
+.williamhill generic William Hill Organization Limited
+.wme generic William Morris Endeavor Entertainment, LLC
+.work generic Top Level Domain Holdings Limited
+.works generic Little Dynamite, LLC
+.world generic Bitter Fields, LLC
+.ws country-code Government of Samoa Ministry of Foreign Affairs & Trade
+.wtc generic World Trade Centers Association, Inc.
+.wtf generic Hidden Way, LLC
+.测试 test Internet Assigned Numbers Authority
+.परीक्षा test Internet Assigned Numbers Authority
+.佛山 generic Guangzhou YU Wei Information Technology Co., Ltd.
+.集团 generic Eagle Horizon Limited
+.在线 generic TLD REGISTRY LIMITED
+.한국 country-code KISA (Korea Internet & Security Agency)
+.ভারত country-code National Internet Exchange of India
+.八卦 generic Zodiac Scorpio Limited
+.موقع generic Suhub Electronic Establishment
+.বাংলা country-code Not assigned
+.公益 generic China Organizational Name Administration Center
+.公司 generic Computer Network Information Center of Chinese Academy of Sciences (China Internet Network Information Center)
+.移动 generic Afilias Limited
+.我爱你 generic Tycoon Treasure Limited
+.москва generic Foundation for Assistance for Internet Technologies and Infrastructure Development (FAITID)
+.испытание test Internet Assigned Numbers Authority
+.қаз country-code Association of IT Companies of Kazakhstan
+.онлайн generic CORE Association
+.сайт generic CORE Association
+.срб country-code Serbian National Internet Domain Registry (RNIDS)
+.бел country-code Reliable Software Inc.
+.테스트 test Internet Assigned Numbers Authority
+.淡马锡 generic Temasek Holdings (Private) Limited
+.орг generic Public Interest Registry
+.삼성 generic SAMSUNG SDS CO., LTD
+.சிங்கப்பூர் country-code Singapore Network Information Centre (SGNIC) Pte Ltd
+.商标 generic HU YI GLOBAL INFORMATION RESOURCES(HOLDING) COMPANY.HONGKONG LIMITED
+.商店 generic Wild Island, LLC
+.商城 generic Zodiac Aquarius Limited
+.дети generic The Foundation for Network Initiatives “The Smart Internet”
+.мкд country-code Macedonian Academic Research Network Skopje
+.טעסט test Internet Assigned Numbers Authority
+.中文网 generic TLD REGISTRY LIMITED
+.中信 generic CITIC Group Corporation
+.中国 country-code China Internet Network Information Center
+.中國 country-code China Internet Network Information Center
+.谷歌 generic Charleston Road Registry Inc.
+.భారత్ country-code National Internet Exchange of India
+.ලංකා country-code LK Domain Registry
+.測試 test Internet Assigned Numbers Authority
+.ભારત country-code National Internet Exchange of India
+.भारत country-code National Internet Exchange of India
+.آزمایشی test Internet Assigned Numbers Authority
+.பரிட்சை test Internet Assigned Numbers Authority
+.网店 generic Zodiac Libra Limited
+.संगठन generic Public Interest Registry
+.网络 generic Computer Network Information Center of Chinese Academy of Sciences (China Internet Network Information Center)
+.укр country-code Ukrainian Network Information Centre (UANIC), Inc.
+.香港 country-code Hong Kong Internet Registration Corporation Ltd.
+.δοκιμή test Internet Assigned Numbers Authority
+.إختبار test Internet Assigned Numbers Authority
+.台湾 country-code Taiwan Network Information Center (TWNIC)
+.台灣 country-code Taiwan Network Information Center (TWNIC)
+.手机 generic Beijing RITT-Net Technology Development Co., Ltd
+.мон country-code Datacom Co.,Ltd
+.الجزائر country-code CERIST
+.عمان country-code Telecommunications Regulatory Authority (TRA)
+.ایران country-code Institute for Research in Fundamental Sciences (IPM)
+.امارات country-code Telecommunications Regulatory Authority (TRA)
+.بازار generic CORE Association
+.پاکستان country-code Not assigned
+.الاردن country-code National Information Technology Center (NITC)
+.بھارت country-code National Internet Exchange of India
+.المغرب country-code Agence Nationale de Réglementation des Télécommunications (ANRT)
+.السعودية country-code Communications and Information Technology Commission
+.سودان country-code Not assigned
+.عراق country-code Not assigned
+.مليسيا country-code MYNIC Berhad
+.شبكة generic International Domain Registry Pty. Ltd.
+.გე country-code Information Technologies Development Center (ITDC)
+.机构 generic Public Interest Registry
+.组织机构 generic Public Interest Registry
+.ไทย country-code Thai Network Information Center Foundation
+.سورية country-code National Agency for Network Services (NANS)
+.рус generic Rusnames Limited
+.рф country-code Coordination Center for TLD RU
+.تونس country-code Agence Tunisienne d'Internet
+.みんな generic Charleston Road Registry Inc.
+.グーグル generic Charleston Road Registry Inc.
+.世界 generic Stable Tone Limited
+.ਭਾਰਤ country-code National Internet Exchange of India
+.网址 generic KNET Co., Ltd
+.游戏 generic Spring Fields, LLC
+.vermögensberater generic Deutsche Vermögensberatung Aktiengesellschaft DVAG
+.vermögensberatung generic Deutsche Vermögensberatung Aktiengesellschaft DVAG
+.企业 generic Dash McCook, LLC
+.مصر country-code National Telecommunication Regulatory Authority - NTRA
+.قطر country-code Communications Regulatory Authority
+.广东 generic Guangzhou YU Wei Information Technology Co., Ltd.
+.இலங்கை country-code LK Domain Registry
+.இந்தியா country-code National Internet Exchange of India
+.հայ country-code Not assigned
+.新加坡 country-code Singapore Network Information Centre (SGNIC) Pte Ltd
+.فلسطين country-code Ministry of Telecom & Information Technology (MTIT)
+.テスト test Internet Assigned Numbers Authority
+.政务 generic China Organizational Name Administration Center
+.xxx sponsored ICM Registry LLC
+.xyz generic XYZ.COM LLC
+.yachts generic DERYachts, LLC
+.yandex generic YANDEX, LLC
+.ye country-code TeleYemen
+.yodobashi generic YODOBASHI CAMERA CO.,LTD.
+.yoga generic Top Level Domain Holdings Limited
+.yokohama generic GMO Registry, Inc.
+.youtube generic Charleston Road Registry Inc.
+.yt country-code Association Française pour le Nommage Internet en Coopération (A.F.N.I.C.)
+.za country-code ZA Domain Name Authority
+.zip generic Charleston Road Registry Inc.
+.zm country-code Zambia Information and Communications Technology Authority (ZICTA)
+.zone generic Outer Falls, LLC
+.zuerich generic Kanton Zürich (Canton of Zurich)
+.zw country-code Postal and Telecommunications Regulatory Authority of Zimbabwe (POTRAZ)
\ No newline at end of file
diff --git a/concurrency/flags/vaurien_delay.sh b/attic/concurrency/flags/vaurien_delay.sh
similarity index 100%
rename from concurrency/flags/vaurien_delay.sh
rename to attic/concurrency/flags/vaurien_delay.sh
diff --git a/concurrency/flags/vaurien_error_delay.sh b/attic/concurrency/flags/vaurien_error_delay.sh
similarity index 100%
rename from concurrency/flags/vaurien_error_delay.sh
rename to attic/concurrency/flags/vaurien_error_delay.sh
diff --git a/attic/concurrency/parallel/lelo_ex.py b/attic/concurrency/parallel/lelo_ex.py
new file mode 100644
index 0000000..f7802a0
--- /dev/null
+++ b/attic/concurrency/parallel/lelo_ex.py
@@ -0,0 +1,24 @@
+import os
+from time import sleep, time
+
+from lelo import parallel
+
+DELAY = .2
+
+@parallel
+def loiter(serial, delay):
+ pid = os.getpid()
+ print('%2d pid = %d' % (serial, pid))
+ sleep(delay)
+ return pid
+
+t0 = time()
+
+results = []
+for i in range(15):
+ res = loiter(i, DELAY)
+ results.append(res)
+
+print('Processes used: ', list(set(results)))
+
+print('### Elapsed time: %0.2f' % (time() - t0))
diff --git a/attic/concurrency/parallel/llize.py b/attic/concurrency/parallel/llize.py
new file mode 100644
index 0000000..7e4f873
--- /dev/null
+++ b/attic/concurrency/parallel/llize.py
@@ -0,0 +1,19 @@
+import os
+from parallelize import parallelize
+from time import sleep, time
+
+print('one process:')
+t0 = time()
+for i in range(12):
+ print('%2d pid = %d' % (i, os.getpid()))
+ sleep(.2)
+print('elapsed time: %0.2f' % (time() - t0))
+
+print()
+
+print('several processes:')
+t0 = time()
+for i in parallelize(range(12)):
+ print('%2d pid = %d' % (i, os.getpid()))
+ sleep(.2)
+print('elapsed time: %0.2f' % (time() - t0))
diff --git a/attic/concurrency/parallel/llize_ex.py b/attic/concurrency/parallel/llize_ex.py
new file mode 100644
index 0000000..2bbc59c
--- /dev/null
+++ b/attic/concurrency/parallel/llize_ex.py
@@ -0,0 +1,23 @@
+import os
+from time import sleep, time
+
+from parallelize import parallelize, per_item
+
+DELAY = .2
+
+def loiter(serial, delay):
+ pid = os.getpid()
+ print('%2d pid = %d' % (serial, pid))
+ sleep(delay)
+ return pid
+
+t0 = time()
+
+results = []
+for i in parallelize(range(15), fork=per_item):
+ res = loiter(i, DELAY)
+ results.append(res)
+
+print('Processes used: ', list(set(results)))
+
+print('### Elapsed time: %0.2f' % (time() - t0))
diff --git a/attic/concurrency/spinner_asyncio.py b/attic/concurrency/spinner_asyncio.py
new file mode 100644
index 0000000..81097e4
--- /dev/null
+++ b/attic/concurrency/spinner_asyncio.py
@@ -0,0 +1,42 @@
+# spinner_asyncio.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example
+# source:
+# http://python-3-patterns-idioms-test.readthedocs.org/en/latest/CoroutinesAndConcurrency.html
+
+import sys
+import asyncio
+
+DELAY = 0.1
+DISPLAY = '|/-\\'
+
+@asyncio.coroutine
+def spinner_func(before='', after=''):
+ write, flush = sys.stdout.write, sys.stdout.flush
+ while True:
+ for char in DISPLAY:
+ msg = '{} {} {}'.format(before, char, after)
+ write(msg)
+ flush()
+ write('\x08' * len(msg))
+ try:
+ yield from asyncio.sleep(DELAY)
+ except asyncio.CancelledError:
+ return
+
+
+@asyncio.coroutine
+def long_computation(delay):
+ # emulate a long computation
+ yield from asyncio.sleep(delay)
+
+
+if __name__ == '__main__':
+ loop = asyncio.get_event_loop()
+ spinner = loop.create_task(spinner_func('Please wait...', 'thinking!'))
+ long_task = loop.create_task(long_computation(3))
+ long_task.add_done_callback(lambda f: spinner.cancel())
+ loop.run_until_complete(spinner)
+ loop.close()
+
diff --git a/attic/concurrency/spinner_asyncio2.py b/attic/concurrency/spinner_asyncio2.py
new file mode 100644
index 0000000..5cea214
--- /dev/null
+++ b/attic/concurrency/spinner_asyncio2.py
@@ -0,0 +1,46 @@
+# spinner_asyncio2.py
+
+# credits: Example by Luciano Ramalho inspired by
+# Michele Simionato's multiprocessing example
+# source:
+# http://python-3-patterns-idioms-test.readthedocs.org/en/latest/CoroutinesAndConcurrency.html
+
+import sys
+import asyncio
+
+DELAY = 0.1
+DISPLAY = '|/-\\'
+
+@asyncio.coroutine
+def spinner_func(before='', after=''):
+ write, flush = sys.stdout.write, sys.stdout.flush
+ while True:
+ for char in DISPLAY:
+ msg = '{} {} {}'.format(before, char, after)
+ write(msg)
+ flush()
+ write('\x08' * len(msg))
+ try:
+ yield from asyncio.sleep(DELAY)
+ except asyncio.CancelledError:
+ return
+
+
+@asyncio.coroutine
+def long_computation(delay):
+ # emulate a long computation
+ yield from asyncio.sleep(delay)
+
+
+@asyncio.coroutine
+def supervisor(delay):
+ spinner = loop.create_task(spinner_func('Please wait...', 'thinking!'))
+ yield from long_computation(delay)
+ spinner.cancel()
+
+
+if __name__ == '__main__':
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(supervisor(3))
+ loop.close()
+
diff --git a/concurrency/spinner_proc.py b/attic/concurrency/spinner_proc.py
similarity index 100%
rename from concurrency/spinner_proc.py
rename to attic/concurrency/spinner_proc.py
diff --git a/concurrency/spinner_thread.py b/attic/concurrency/spinner_thread.py
similarity index 100%
rename from concurrency/spinner_thread.py
rename to attic/concurrency/spinner_thread.py
diff --git a/concurrency/timer.py b/attic/concurrency/timer.py
similarity index 100%
rename from concurrency/timer.py
rename to attic/concurrency/timer.py
diff --git a/concurrency/timer2.py b/attic/concurrency/timer2.py
similarity index 100%
rename from concurrency/timer2.py
rename to attic/concurrency/timer2.py
diff --git a/concurrency/timer_cb.py b/attic/concurrency/timer_cb.py
similarity index 100%
rename from concurrency/timer_cb.py
rename to attic/concurrency/timer_cb.py
diff --git a/concurrency/timer_clo.py b/attic/concurrency/timer_clo.py
similarity index 100%
rename from concurrency/timer_clo.py
rename to attic/concurrency/timer_clo.py
diff --git a/concurrency/timer_seq.py b/attic/concurrency/timer_seq.py
similarity index 100%
rename from concurrency/timer_seq.py
rename to attic/concurrency/timer_seq.py
diff --git a/attic/concurrency/wikipedia/README.rst b/attic/concurrency/wikipedia/README.rst
new file mode 100644
index 0000000..dfe168a
--- /dev/null
+++ b/attic/concurrency/wikipedia/README.rst
@@ -0,0 +1,138 @@
+====================================
+Configuring a local test environment
+====================================
+
+tl;dr;
+======
+
+This text explains how to configure **nginx** and **vaurien** to build
+a local mirror of the data to run the Wikipedia Picture of the Day
+examples while avoiding network traffic and introducing controlled
+delays and errors for testing, thanks to the **vaurien** proxy.
+
+
+Rationale and overview
+======================
+
+The Wikipedia Picture of the Day examples are designed to demonstrate
+the performance of different approaches to finding and downloading
+images from the Wikipedia. However, we don't want to hit the Wikipedia
+with multiple requests per second while testing, and we want to be
+able to simulate high latency and random network errors.
+
+For this setup I chose **nginx** as the HTTP server because it is very
+fast and easy to configure, and the **vaurien** proxy because it was
+designed by Mozilla to introduce delays and network errors for testing.
+
+The initial fixture data, ``docroot.zip``, contains a directory
+``docroot/Template-POTD/`` with 1096 small text files, each consisting
+of an HTML fragment (just a ``src="..."`` attribute) or an error message
+(for days when no picture was published, like 2013-09-12). These files
+correspond to every day of the years 2012, 2013 and 2014. The year 2012
+was a leap year, which is why there are 1096 files and not 1095.
+
+Once these files are unpacked to the ``docroot/Template-POTD`` directory
+and **nginx** is configured, the ``build_fixture.py`` script can fetch the
+actual images from the Wikipedia for local storage in the directory
+``docroot/wikimedia/``.
+
+When that is done you can configure **nginx** and **vaurien** to experiment
+with the ``daypicts*.py`` examples without hitting the network.
+
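+The switch between the live site and the local mirror is made in
+``daypicts.py``, which defines the base URL constants. A minimal sketch of the
+relevant lines, assuming the local **nginx** server listens on port 8001 as
+configured below (port 8002, the **vaurien** proxy, can be used instead to add
+simulated lag)::
+
+    HTTP_PORT = 8001
+    # live Wikipedia templates
+    POTD_BASE_URL = 'http://en.wikipedia.org/wiki/Template:POTD/'
+    # local mirror served by nginx -- uncomment to use it
+    #POTD_BASE_URL = 'http://127.0.0.1:{}/Template-POTD/'.format(HTTP_PORT)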
+
+Instructions
+============
+
+1. Unpack test data
+-------------------
+
+Unpack the initial data in the ``fixture/`` directory and verify that 1096
+files were created in ``fixture/docroot/Template-POTD/``::
+
+ $ ls # inside the fixture/ directory
+ README.rst docroot.zip
+ $ unzip docroot.zip
+ ... many lines omitted...
+ inflating: docroot/Template-POTD/2014-12-29
+ inflating: docroot/Template-POTD/2014-12-30
+ inflating: docroot/Template-POTD/2014-12-31
+ $ ls docroot/Template-POTD/ | wc -w
+ 1096
+
+
+2. Install **nginx**
+--------------------
+
+Download and install **nginx**. I used version 1.6.2 -- the latest
+stable version as I write this.
+
+- Download page: http://nginx.org/en/download.html
+
+- Beginner's guide: http://nginx.org/en/docs/beginners_guide.html
+
+
+3. Configure **nginx**
+----------------------
+
+Edit the ``nginx.conf`` file to set the port and document root.
+The file is usually found in ``/usr/local/nginx/conf``, ``/etc/nginx``,
+or ``/usr/local/etc/nginx``.
+
+Most of the content in ``nginx.conf`` is within a block labeled ``http``
+and enclosed in curly braces. Within that block there can be multiple
+blocks labeled ``server``. Add another ``server`` block like this one::
+
+ server {
+ listen 8001;
+
+ location / {
+ root /full-path-to.../fixture/docroot;
+ }
+ }
+
+After editing ``nginx.conf`` the server must be started (if it's not
+running) or told to reload the configuration file::
+
+ $ nginx # to start, if necessary
+ $ nginx -s reload # to reload the configuration
+
+To test the configuration, open the URL below in a browser. Doing so
+will download a small file named ``2014-01-01`` with an HTML fragment::
+
+ http://localhost:8001/Template-POTD/2014-01-01
+
+If the test fails, please double check the procedure just described and
+refer to the **nginx** documentation.
+
+
+Platform-specific instructions
+==============================
+
+Nginx setup on Mac OS X
+-----------------------
+
+Homebrew (copy & paste code at the bottom of http://brew.sh/)::
+
+ $ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ $ brew doctor
+ $ brew install nginx
+
+Notes from the ``brew install nginx`` output::
+
+    Docroot is: /usr/local/var/www
+    /usr/local/etc/nginx/nginx.conf
+
+    To have launchd start nginx at login:
+        ln -sfv /usr/local/opt/nginx/*.plist ~/Library/LaunchAgents
+    Then to load nginx now:
+        launchctl load ~/Library/LaunchAgents/homebrew.mxcl.nginx.plist
+    Or, if you don't want/need launchctl, you can just run:
+        nginx
+
+
+
+Nginx setup on Lubuntu 14.04.1 LTS
+----------------------------------
+
+Docroot is: /usr/share/nginx/html
+
diff --git a/concurrency/wikipedia/build_fixture.py b/attic/concurrency/wikipedia/build_fixture.py
similarity index 100%
rename from concurrency/wikipedia/build_fixture.py
rename to attic/concurrency/wikipedia/build_fixture.py
diff --git a/concurrency/wikipedia/daypicts.py b/attic/concurrency/wikipedia/daypicts.py
similarity index 95%
rename from concurrency/wikipedia/daypicts.py
rename to attic/concurrency/wikipedia/daypicts.py
index 34dc92b..be3aa6f 100644
--- a/concurrency/wikipedia/daypicts.py
+++ b/attic/concurrency/wikipedia/daypicts.py
@@ -28,12 +28,14 @@
SAVE_DIR = 'downloaded/'
-#POTD_BASE_URL = 'http://en.wikipedia.org/wiki/Template:POTD/'
-POTD_BASE_URL = 'http://127.0.0.1:8001/Template-POTD/'
+HTTP_PORT = 8002
+POTD_BASE_URL = 'http://en.wikipedia.org/wiki/Template:POTD/'
+#POTD_BASE_URL = 'http://127.0.0.1:{}/Template-POTD/'.format(HTTP_PORT)
REMOTE_PICT_BASE_URL = 'http://upload.wikimedia.org/wikipedia/'
-LOCAL_PICT_BASE_URL = 'http://127.0.0.1:8001/'
-PICT_BASE_URL = LOCAL_PICT_BASE_URL
+#LOCAL_PICT_BASE_URL = 'http://127.0.0.1:{}/'.format(HTTP_PORT)
+LOCAL_PICT_BASE_URL = REMOTE_PICT_BASE_URL
+PICT_BASE_URL = REMOTE_PICT_BASE_URL
POTD_IMAGE_RE = re.compile(r'src="(//upload\..*?)"')
PODT_EARLIEST_TEMPLATE = '2007-01-01'
diff --git a/concurrency/wikipedia/daypicts_asyncio.py b/attic/concurrency/wikipedia/daypicts_asyncio.py
similarity index 100%
rename from concurrency/wikipedia/daypicts_asyncio.py
rename to attic/concurrency/wikipedia/daypicts_asyncio.py
diff --git a/concurrency/wikipedia/daypicts_threads.py b/attic/concurrency/wikipedia/daypicts_threads.py
similarity index 100%
rename from concurrency/wikipedia/daypicts_threads.py
rename to attic/concurrency/wikipedia/daypicts_threads.py
diff --git a/attic/concurrency/wikipedia/delay.sh b/attic/concurrency/wikipedia/delay.sh
new file mode 100755
index 0000000..34ad0a9
--- /dev/null
+++ b/attic/concurrency/wikipedia/delay.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+vaurien --protocol http --proxy localhost:8002 --backend localhost:8001 \
+ --behavior 100:delay --behavior-delay-sleep .1
\ No newline at end of file
diff --git a/concurrency/wikipedia/fast_tests.sh b/attic/concurrency/wikipedia/fast_tests.sh
similarity index 100%
rename from concurrency/wikipedia/fast_tests.sh
rename to attic/concurrency/wikipedia/fast_tests.sh
diff --git a/concurrency/wikipedia/fixture/README.rst b/attic/concurrency/wikipedia/fixture/README.rst
similarity index 58%
rename from concurrency/wikipedia/fixture/README.rst
rename to attic/concurrency/wikipedia/fixture/README.rst
index abc47bd..0ae35c8 100644
--- a/concurrency/wikipedia/fixture/README.rst
+++ b/attic/concurrency/wikipedia/fixture/README.rst
@@ -5,10 +5,10 @@ Configuring a local test environment
tl;dr;
======
-This text explains how to configure *nginx* and *vaurien* to build
-a local mirror of the data to test the Wikipedia Picture of the Day
-examples while saving network bandwidth and introducing controlled
-delays and errors, thanks to the *vaurien* proxy.
+This text explains how to configure **nginx** and **vaurien** to build
+a local mirror of the data to run the Wikipedia Picture of the Day
+examples while avoiding network traffic and introducing controlled
+delays and errors for testing, thanks to the **vaurien** proxy.
Rationale and overview
@@ -20,8 +20,8 @@ images from the Wikipedia. However, we don't want to hit the Wikipedia
with multiple requests per second while testing, and we want to be
able to simulate high latency and random network errors.
-For this setup I chose *nginx* as the HTTP server because it is very
-fast and easy to configure, and the *vaurien* proxy because it was
+For this setup I chose **nginx** as the HTTP server because it is very
+fast and easy to configure, and the **vaurien** proxy because it was
designed by Mozilla to introduce delays and network errors for testing.
The initial fixture data, ``docroot.zip``, contains a directory
@@ -32,22 +32,25 @@ correspond to every day of the years 2012, 2013 and 2014. The year 2012
was a leap year, that's why there are 1096 files and not 1095.
Once these files are unpacked to the ``docroot/Template-POTD`` directory
-and *nginx* is configured, the ``build_fixture.py`` script can fetch the
+and **nginx** is configured, the ``build_fixture.py`` script can fetch the
actual images from the Wikipedia for local storage in the directory
``docroot/wikimedia/``.
-When that is done you can configure *nginx* and *vaurien* to experiment
+When that is done you can configure **nginx** and **vaurien** to experiment
with the ``daypicts*.py``examples without hitting the network.
Instructions
============
-1. Unpack data
---------------
+1. Unpack test data
+-------------------
-Unpack the initial data and verify that 1096 files were created::
+Unpack the initial data in the ``fixture/`` directory and verify that 1096
+files were created in ``fixture/docroot/Template-POTD/``::
+ $ ls # inside the fixture/ directory
+ README.rst docroot.zip
$ unzip docroot.zip
... many lines omitted...
inflating: docroot/Template-POTD/2014-12-29
@@ -57,10 +60,10 @@ Unpack the initial data and verify that 1096 files were created::
1096
-2. Install *nginx*
-------------------
+2. Install **nginx**
+--------------------
-Download and install *nginx*. I used version 1.6.2 -- the latest
+Download and install **nginx**. I used version 1.6.2 -- the latest
stable version as I write this.
- Download page: http://nginx.org/en/download.html
@@ -68,8 +71,8 @@ stable version as I write this.
- Beginner's guide: http://nginx.org/en/docs/beginners_guide.html
-3. Configure *nginx*
---------------------
+3. Configure **nginx**
+----------------------
Edit the the ``nginx.conf`` file to set the port and document root.
The file is usually found in ``/usr/local/nginx/conf``, ``/etc/nginx``,
@@ -83,10 +86,20 @@ blocks labeled ``server``. Add another ``server`` block like this one::
listen 8001;
location / {
- root /full-path-to-your-directory/fixture/docroot;
+ root /full-path-to.../fixture/docroot;
}
}
+After editing ``nginx.conf`` the server must be started (if it's not
+running) or told to reload the configuration file::
+
+ $ nginx # to start, if necessary
+ $ nginx -s reload # to reload the configuration
+
+To test the configuration, open the URL below in a browser. Doing so
+will download a small file named ``2014-01-01`` with an HTML fragment::
+ http://localhost:8001/Template-POTD/2014-01-01
-http://localhost:8001/Template-POTD/2014-01-01
+If the test fails, please double check the procedure just described and
+refer to the **nginx** documentation.
diff --git a/concurrency/wikipedia/fixture/docroot.zip b/attic/concurrency/wikipedia/fixture/docroot.zip
similarity index 100%
rename from concurrency/wikipedia/fixture/docroot.zip
rename to attic/concurrency/wikipedia/fixture/docroot.zip
diff --git a/attic/concurrency/wikipedia/orig/README.rst b/attic/concurrency/wikipedia/orig/README.rst
new file mode 100644
index 0000000..4a56d28
--- /dev/null
+++ b/attic/concurrency/wikipedia/orig/README.rst
@@ -0,0 +1,39 @@
+=====================================
+Wikipedia Picture of the Day examples
+=====================================
+
+These examples use various asynchronous programming techniques to download
+images and metadata from the English Wikipedia `Picture of the Day`_ archive.
+
+.. _Picture of the Day: http://en.wikipedia.org/wiki/Wikipedia:Picture_of_the_day/Archive
+
+
+--------
+Timings
+--------
+
+``sync.py``
+===========
+
+::
+
+ $ time python sync.py 2014-06 -q 5
+ 5 images downloaded (167.8 Kbytes total)
+
+ real 0m6.272s
+ user 0m0.065s
+ sys 0m0.039s
+
+ $ time python sync.py 2014-06 -q 5
+ 5 images downloaded (167.8 Kbytes total)
+
+ real 0m5.447s
+ user 0m0.068s
+ sys 0m0.040s
+
+ $ time python sync.py 2014-06 -q 5
+ 5 images downloaded (167.8 Kbytes total)
+
+ real 0m6.314s
+ user 0m0.068s
+ sys 0m0.040s
diff --git a/concurrency/wikipedia/orig/futureprocs.py b/attic/concurrency/wikipedia/orig/futureprocs.py
similarity index 100%
rename from concurrency/wikipedia/orig/futureprocs.py
rename to attic/concurrency/wikipedia/orig/futureprocs.py
diff --git a/concurrency/wikipedia/orig/futurethreads.py b/attic/concurrency/wikipedia/orig/futurethreads.py
similarity index 100%
rename from concurrency/wikipedia/orig/futurethreads.py
rename to attic/concurrency/wikipedia/orig/futurethreads.py
diff --git a/concurrency/wikipedia/orig/potd.py b/attic/concurrency/wikipedia/orig/potd.py
similarity index 100%
rename from concurrency/wikipedia/orig/potd.py
rename to attic/concurrency/wikipedia/orig/potd.py
diff --git a/concurrency/wikipedia/orig/potd_tests.py b/attic/concurrency/wikipedia/orig/potd_tests.py
similarity index 100%
rename from concurrency/wikipedia/orig/potd_tests.py
rename to attic/concurrency/wikipedia/orig/potd_tests.py
diff --git a/concurrency/wikipedia/orig/sync.py b/attic/concurrency/wikipedia/orig/sync.py
similarity index 100%
rename from concurrency/wikipedia/orig/sync.py
rename to attic/concurrency/wikipedia/orig/sync.py
diff --git a/concurrency/wikipedia/orig/sync_py3.py b/attic/concurrency/wikipedia/orig/sync_py3.py
similarity index 100%
rename from concurrency/wikipedia/orig/sync_py3.py
rename to attic/concurrency/wikipedia/orig/sync_py3.py
diff --git a/concurrency/wikipedia/test_daypicts.py b/attic/concurrency/wikipedia/test_daypicts.py
similarity index 100%
rename from concurrency/wikipedia/test_daypicts.py
rename to attic/concurrency/wikipedia/test_daypicts.py
diff --git a/control/adder/coroadder.py b/attic/control/adder/coroadder.py
similarity index 93%
rename from control/adder/coroadder.py
rename to attic/control/adder/coroadder.py
index 6014a0f..22d0565 100644
--- a/control/adder/coroadder.py
+++ b/attic/control/adder/coroadder.py
@@ -45,7 +45,7 @@
def adder_coro(initial=0):
total = initial
- num_terms = 0
+ count = 0
while True:
try:
term = yield total
@@ -54,8 +54,8 @@ def adder_coro(initial=0):
if term is None:
break
total += term
- num_terms += 1
- return Result(total, num_terms, total/num_terms)
+ count += 1
+ return Result(total, count, total/count)
def prompt():
diff --git a/attic/control/adder/coroadder0.py b/attic/control/adder/coroadder0.py
new file mode 100644
index 0000000..a3b497c
--- /dev/null
+++ b/attic/control/adder/coroadder0.py
@@ -0,0 +1,43 @@
+"""
+Closing a generator raises ``GeneratorExit`` at the pending ``yield``
+
+ >>> adder = adder_coro()
+ >>> next(adder)
+ 0
+ >>> adder.send(10)
+ 10
+ >>> adder.send(20)
+ 30
+ >>> adder.send(30)
+ 60
+ >>> adder.close()
+ -> total: 60 terms: 3 average: 20.0
+
+
+Other exceptions propagate to the caller:
+
+ >>> adder = adder_coro()
+ >>> next(adder)
+ 0
+ >>> adder.send(10)
+ 10
+ >>> adder.send('spam')
+ Traceback (most recent call last):
+ ...
+ TypeError: unsupported operand type(s) for +=: 'int' and 'str'
+
+
+"""
+
+def adder_coro(initial=0):
+ total = initial
+ count = 0
+ try:
+ while True:
+ term = yield total
+ total += term
+ count += 1
+ except GeneratorExit:
+ average = total / count
+ msg = '-> total: {} terms: {} average: {}'
+ print(msg.format(total, count, average))
diff --git a/control/adder/coroadder_deco.py b/attic/control/adder/coroadder_deco.py
similarity index 100%
rename from control/adder/coroadder_deco.py
rename to attic/control/adder/coroadder_deco.py
diff --git a/control/adder/soma.py b/attic/control/adder/soma.py
similarity index 100%
rename from control/adder/soma.py
rename to attic/control/adder/soma.py
diff --git a/control/adder/soma_deco.py b/attic/control/adder/soma_deco.py
similarity index 100%
rename from control/adder/soma_deco.py
rename to attic/control/adder/soma_deco.py
diff --git a/attic/control/adder/yetanother.py b/attic/control/adder/yetanother.py
new file mode 100644
index 0000000..fcc4a3a
--- /dev/null
+++ b/attic/control/adder/yetanother.py
@@ -0,0 +1,47 @@
+import sys
+import collections
+
+Result = collections.namedtuple('Result', 'total average')
+
+def adder():
+ total = 0
+ count = 0
+ while True:
+ term = yield
+ try:
+ term = float(term)
+ except (ValueError, TypeError):
+ break
+ else:
+ total += term
+ count += 1
+ return Result(total, total/count)
+
+def process_args(coro, args):
+ for arg in args:
+ coro.send(arg)
+ try:
+ next(coro)
+ except StopIteration as exc:
+ return exc.value
+
+
+def prompt(coro):
+ while True:
+ term = input('+> ')
+ try:
+ coro.send(term)
+ except StopIteration as exc:
+ return exc.value
+
+
+def main():
+ coro = adder()
+ next(coro) # prime it
+ if len(sys.argv) > 1:
+ res = process_args(coro, sys.argv[1:])
+ else:
+ res = prompt(coro)
+ print(res)
+
+main()
diff --git a/attic/control/adder/yield_from_input.py b/attic/control/adder/yield_from_input.py
new file mode 100644
index 0000000..6cedc37
--- /dev/null
+++ b/attic/control/adder/yield_from_input.py
@@ -0,0 +1,42 @@
+import sys
+
+
+def ask():
+ prompt = '>'
+ while True:
+ response = input(prompt)
+ if not response:
+ return 0
+ yield response
+
+
+def parse_args():
+ yield from iter(sys.argv[1:])
+
+
+def fetch(producer):
+ gen = producer()
+ next(gen)
+ yield from gen
+
+
+def main(args):
+ if args:
+ producer = parse_args
+ else:
+ producer = ask
+
+ total = 0
+ count = 0
+ gen = fetch(producer())
+ while True:
+ term = yield from gen
+ term = float(term)
+ total += term
+ count += 1
+ average = total / count
+ print('total: {} average: {}'.format(total, average))
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/control/coro_demo.rst b/attic/control/coro_demo.rst
similarity index 100%
rename from control/coro_demo.rst
rename to attic/control/coro_demo.rst
diff --git a/control/coro_simple_demo.rst b/attic/control/coro_simple_demo.rst
similarity index 100%
rename from control/coro_simple_demo.rst
rename to attic/control/coro_simple_demo.rst
diff --git a/attic/control/coroaverager.py b/attic/control/coroaverager.py
new file mode 100644
index 0000000..dc76aaa
--- /dev/null
+++ b/attic/control/coroaverager.py
@@ -0,0 +1,45 @@
+"""
+Closing a generator raises ``GeneratorExit`` at the pending ``yield``
+
+ >>> coro_avg = averager()
+ >>> next(coro_avg)
+ 0.0
+ >>> coro_avg.send(10)
+ 10.0
+ >>> coro_avg.send(20)
+ 15.0
+ >>> coro_avg.send(30)
+ 20.0
+ >>> coro_avg.close()
+ -> total: 60.0 average: 20.0 terms: 3
+
+
+Other exceptions propagate to the caller:
+
+ >>> coro_avg = averager()
+ >>> next(coro_avg)
+ 0.0
+ >>> coro_avg.send(10)
+ 10.0
+ >>> coro_avg.send('spam')
+ Traceback (most recent call last):
+ ...
+ TypeError: unsupported operand type(s) for +=: 'float' and 'str'
+
+
+"""
+
+# BEGIN CORO_AVERAGER
+def averager():
+ total = average = 0.0
+ count = 0
+ try:
+ while True:
+ term = yield average
+ total += term
+ count += 1
+ average = total/count
+ except GeneratorExit:
+ msg = '-> total: {} average: {} terms: {}'
+ print(msg.format(total, average, count))
+# END CORO_AVERAGER
diff --git a/attic/control/countdown_yf.py b/attic/control/countdown_yf.py
new file mode 100644
index 0000000..013a2bf
--- /dev/null
+++ b/attic/control/countdown_yf.py
@@ -0,0 +1,69 @@
+from time import sleep
+
+def countdown(n):
+ while n:
+ print('\tn ->', n)
+ yield n
+ n -= 1
+ sleep(1)
+
+def foo():
+ for i in range(6, 3, -1):
+ yield i
+ yield from countdown(3)
+
+#for j in foo():
+# print('j ->', j)
+
+
+def squares(n):
+ yield from [i for i in range(n)]
+ yield from [i*i for i in range(n)]
+
+def squares_stupid(n):
+ for i in range(n):
+ yield i
+
+ for i in range(n):
+ yield i*i
+
+#for s in squares(10):
+# print(s)
+
+
+def tokenize():
+ while True:
+ source = input('> ')
+ try:
+ obj = eval(source)
+ except BaseException:
+ print('*crash*')
+ return
+ try:
+ it = iter(obj)
+ except TypeError:
+ yield obj
+ return
+ else:
+ yield from it
+
+#g = tokenize()
+
+#for res in g:
+# print(res)
+
+
+from concurrent.futures import Future
+
+def f():
+    # scratch helper (unused below); note the class is Future, not future
+    return Future()
+
+def foo(fut):
+ print(fut, fut.result())
+f = Future()
+f.add_done_callback(foo)
+f.set_result(42)
+
+
+
+
diff --git a/control/demo_coro.py b/attic/control/demo_coro.py
similarity index 100%
rename from control/demo_coro.py
rename to attic/control/demo_coro.py
diff --git a/control/exemplo0.py b/attic/control/exemplo0.py
similarity index 100%
rename from control/exemplo0.py
rename to attic/control/exemplo0.py
diff --git a/control/exemplo1.py b/attic/control/exemplo1.py
similarity index 100%
rename from control/exemplo1.py
rename to attic/control/exemplo1.py
diff --git a/attic/control/flatten.py b/attic/control/flatten.py
new file mode 100644
index 0000000..b89a827
--- /dev/null
+++ b/attic/control/flatten.py
@@ -0,0 +1,21 @@
+"""
+
+ >>> items = [1, 2, [3, 4, [5, 6], 7], 8]
+    >>> flatten(items)  # doctest: +ELLIPSIS
+    <generator object flatten at 0x...>
+
+ >>> list(flatten(items))
+ [1, 2, 3, 4, 5, 6, 7, 8]
+ >>> mixed_bag = [1, 'spam', 2, [3, 'eggs', 4], {'x': 1, 'y': 2}]
+ >>> list(flatten(mixed_bag))
+ [1, 'spam', 2, 3, 'eggs', 4, 'y', 'x']
+"""
+
+
+from collections.abc import Iterable
+
+def flatten(items):
+ for x in items:
+ if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
+ yield from flatten(x)
+ else:
+ yield x
diff --git a/control/guido/guido0.py b/attic/control/guido/guido0.py
similarity index 100%
rename from control/guido/guido0.py
rename to attic/control/guido/guido0.py
diff --git a/control/guido/guido1.py b/attic/control/guido/guido1.py
similarity index 100%
rename from control/guido/guido1.py
rename to attic/control/guido/guido1.py
diff --git a/control/guido/guido1b.py b/attic/control/guido/guido1b.py
similarity index 100%
rename from control/guido/guido1b.py
rename to attic/control/guido/guido1b.py
diff --git a/control/guido/guido2.py b/attic/control/guido/guido2.py
similarity index 100%
rename from control/guido/guido2.py
rename to attic/control/guido/guido2.py
diff --git a/control/guido/guido3.py b/attic/control/guido/guido3.py
similarity index 100%
rename from control/guido/guido3.py
rename to attic/control/guido/guido3.py
diff --git a/control/http_cli0.py b/attic/control/http_cli0.py
similarity index 100%
rename from control/http_cli0.py
rename to attic/control/http_cli0.py
diff --git a/attic/control/kwcombos.py b/attic/control/kwcombos.py
new file mode 100644
index 0000000..8dffe33
--- /dev/null
+++ b/attic/control/kwcombos.py
@@ -0,0 +1,5 @@
+from keyword import kwlist
+from itertools import combinations
+
+for combo in combinations(kwlist, 2):
+ print(*combo)
diff --git a/decorators/average_broken.py b/attic/decorators/average_broken.py
similarity index 100%
rename from decorators/average_broken.py
rename to attic/decorators/average_broken.py
diff --git a/decorators/average_fixed.py b/attic/decorators/average_fixed.py
similarity index 100%
rename from decorators/average_fixed.py
rename to attic/decorators/average_fixed.py
diff --git a/decorators/average_fixed_py2.py b/attic/decorators/average_fixed_py2.py
similarity index 100%
rename from decorators/average_fixed_py2.py
rename to attic/decorators/average_fixed_py2.py
diff --git a/decorators/average_partial.py b/attic/decorators/average_partial.py
similarity index 100%
rename from decorators/average_partial.py
rename to attic/decorators/average_partial.py
diff --git a/decorators/average_py2.py b/attic/decorators/average_py2.py
similarity index 100%
rename from decorators/average_py2.py
rename to attic/decorators/average_py2.py
diff --git a/decorators/clockdeco2.py b/attic/decorators/clockdeco2.py
similarity index 100%
rename from decorators/clockdeco2.py
rename to attic/decorators/clockdeco2.py
diff --git a/decorators/clockdeco2_demo.py b/attic/decorators/clockdeco2_demo.py
similarity index 100%
rename from decorators/clockdeco2_demo.py
rename to attic/decorators/clockdeco2_demo.py
diff --git a/decorators/clockdeco2_tests.py b/attic/decorators/clockdeco2_tests.py
similarity index 100%
rename from decorators/clockdeco2_tests.py
rename to attic/decorators/clockdeco2_tests.py
diff --git a/decorators/clockdeco_demo2.py b/attic/decorators/clockdeco_demo2.py
similarity index 100%
rename from decorators/clockdeco_demo2.py
rename to attic/decorators/clockdeco_demo2.py
diff --git a/decorators/clockdeco_demo3.py b/attic/decorators/clockdeco_demo3.py
similarity index 100%
rename from decorators/clockdeco_demo3.py
rename to attic/decorators/clockdeco_demo3.py
diff --git a/decorators/clockdeco_tests.py b/attic/decorators/clockdeco_tests.py
similarity index 100%
rename from decorators/clockdeco_tests.py
rename to attic/decorators/clockdeco_tests.py
diff --git a/decorators/currency.py b/attic/decorators/currency.py
similarity index 100%
rename from decorators/currency.py
rename to attic/decorators/currency.py
diff --git a/decorators/fibonacci.py b/attic/decorators/fibonacci.py
similarity index 100%
rename from decorators/fibonacci.py
rename to attic/decorators/fibonacci.py
diff --git a/decorators/local_demo.py b/attic/decorators/local_demo.py
similarity index 100%
rename from decorators/local_demo.py
rename to attic/decorators/local_demo.py
diff --git a/decorators/stacked_demo.py b/attic/decorators/stacked_demo.py
similarity index 100%
rename from decorators/stacked_demo.py
rename to attic/decorators/stacked_demo.py
diff --git a/descriptors/doc_descriptor.py b/attic/descriptors/doc_descriptor.py
similarity index 100%
rename from descriptors/doc_descriptor.py
rename to attic/descriptors/doc_descriptor.py
diff --git a/dicts/dict_perftest.py b/attic/dicts/dict_perftest.py
similarity index 100%
rename from dicts/dict_perftest.py
rename to attic/dicts/dict_perftest.py
diff --git a/dicts/index_alex.py b/attic/dicts/index_alex.py
similarity index 94%
rename from dicts/index_alex.py
rename to attic/dicts/index_alex.py
index 27d7175..73db8c6 100644
--- a/dicts/index_alex.py
+++ b/attic/dicts/index_alex.py
@@ -8,7 +8,7 @@
import sys
import re
-NONWORD_RE = re.compile('\W+')
+NONWORD_RE = re.compile(r'\W+')
idx = {}
with open(sys.argv[1], encoding='utf-8') as fp:
diff --git a/dicts/set_perftest.py b/attic/dicts/set_perftest.py
similarity index 100%
rename from dicts/set_perftest.py
rename to attic/dicts/set_perftest.py
diff --git a/dicts/strkeydict0_userdictsub.py b/attic/dicts/strkeydict0_userdictsub.py
similarity index 100%
rename from dicts/strkeydict0_userdictsub.py
rename to attic/dicts/strkeydict0_userdictsub.py
diff --git a/dicts/strkeydict_dictsub.py b/attic/dicts/strkeydict_dictsub.py
similarity index 100%
rename from dicts/strkeydict_dictsub.py
rename to attic/dicts/strkeydict_dictsub.py
diff --git a/dicts/test_transformdict.py b/attic/dicts/test_transformdict.py
similarity index 100%
rename from dicts/test_transformdict.py
rename to attic/dicts/test_transformdict.py
diff --git a/functions/accgen.py b/attic/functions/accgen.py
similarity index 100%
rename from functions/accgen.py
rename to attic/functions/accgen.py
diff --git a/functions/attrgetter_demo.py b/attic/functions/attrgetter_demo.py
similarity index 100%
rename from functions/attrgetter_demo.py
rename to attic/functions/attrgetter_demo.py
diff --git a/functions/attrgetter_demo.rst b/attic/functions/attrgetter_demo.rst
similarity index 100%
rename from functions/attrgetter_demo.rst
rename to attic/functions/attrgetter_demo.rst
diff --git a/functions/hello.py b/attic/functions/hello.py
similarity index 100%
rename from functions/hello.py
rename to attic/functions/hello.py
diff --git a/functions/strkeydict2.py b/attic/functions/strkeydict2.py
similarity index 100%
rename from functions/strkeydict2.py
rename to attic/functions/strkeydict2.py
diff --git a/attic/futures/callbackhell.js b/attic/futures/callbackhell.js
new file mode 100644
index 0000000..d309e19
--- /dev/null
+++ b/attic/futures/callbackhell.js
@@ -0,0 +1,14 @@
+fetch1(request1, function (response1) {
+ // phase 1
+ var request2 = step1(response1);
+
+ fetch2(request2, function (response2) {
+ // phase 2
+ var request3 = step2(response2);
+
+ fetch3(request3, function (response3) {
+ // phase 3
+ step3(response3);
+ });
+ });
+});
diff --git a/attic/futures/callbackhell.py b/attic/futures/callbackhell.py
new file mode 100644
index 0000000..d5d1c3c
--- /dev/null
+++ b/attic/futures/callbackhell.py
@@ -0,0 +1,15 @@
+def phase1(response1):
+ request2 = step1(response1)
+ fetch2(request2, phase2)
+
+
+def phase2(response2):
+ request3 = step2(response2)
+ fetch3(request3, phase3)
+
+
+def phase3(response3):
+ step3(response3)
+
+
+fetch1(request1, phase1)
diff --git a/attic/futures/charfinder/charfinder_index.pickle b/attic/futures/charfinder/charfinder_index.pickle
new file mode 100644
index 0000000..f57fba7
Binary files /dev/null and b/attic/futures/charfinder/charfinder_index.pickle differ
diff --git a/attic/futures/coroutine_purgatory.py b/attic/futures/coroutine_purgatory.py
new file mode 100644
index 0000000..a329de7
--- /dev/null
+++ b/attic/futures/coroutine_purgatory.py
@@ -0,0 +1,14 @@
+@asyncio.coroutine
+def three_phases():
+ response1 = yield from fetch1(request1)
+ # phase 1
+ request2 = step1(response1)
+ response2 = yield from fetch2(request2)
+ # phase 2
+ request3 = step2(response2)
+ response3 = yield from fetch3(request3)
+ # phase 3
+ step3(response3)
+
+
+loop.create_task(three_phases())
diff --git a/attic/futures/countries/README.rst b/attic/futures/countries/README.rst
new file mode 100644
index 0000000..026d995
--- /dev/null
+++ b/attic/futures/countries/README.rst
@@ -0,0 +1,194 @@
+====================================
+Configuring a local test environment
+====================================
+
+tl;dr;
+======
+
+This text explains how to configure **nginx** and **vaurien** to build a local
+mirror of the data to run the flag download examples while avoiding network
+traffic and introducing controlled delays and errors for testing, thanks to
+the **vaurien** proxy.
+
+
+Rationale and overview
+======================
+
+The flag download examples are designed to compare the performance of
+different approaches to finding and downloading files from the Web. However,
+we don't want to hit a public server with multiple requests per second while
+testing, and we want to be able to simulate high latency and random network
+errors.
+
+For this setup I chose **nginx** as the HTTP server because it is very fast
+and easy to configure, and the **vaurien** proxy because it was designed by
+Mozilla to introduce delays and network errors for testing.
+
+The archive ``flags.zip`` contains a directory ``flags/`` with 194
+subdirectories, each containing a ``.gif`` image and a ``metadata.json`` file.
+These images are public-domain flags copied from the CIA World Fact Book [1].
+
+[1] https://www.cia.gov/library/publications/the-world-factbook/
+
+Once these files are unpacked to the ``flags/`` directory and **nginx** is
+configured, you can experiment with the ``flags*.py``examples without hitting
+the network.
+
+
+Instructions
+============
+
+1. Unpack test data
+-------------------
+
+Unpack the initial data in the ``countries/`` directory and verify that 194
+directories are created in ``countries/flags/``, each with a ``.gif`` and
+a ``metadata.json`` file::
+
+ $ unzip flags.zip
+ ... many lines omitted...
+ creating: flags/zw/
+ inflating: flags/zw/metadata.json
+ inflating: flags/zw/zw.gif
+ $ ls flags | wc -w
+ 194
+ $ find flags | grep .gif | wc -l
+ 194
+ $ find flags | grep .json | wc -l
+ 194
+ $ ls flags/ad
+ ad.gif metadata.json
+
+
+2. Install **nginx**
+--------------------
+
+Download and install **nginx**. I used version 1.6.2 -- the latest
+stable version as I write this.
+
+- Download page: http://nginx.org/en/download.html
+
+- Beginner's guide: http://nginx.org/en/docs/beginners_guide.html
+
+
+3. Configure **nginx**
+----------------------
+
+Edit the ``nginx.conf`` file to set the port and document root.
+You can determine which ``nginx.conf`` is in use by running::
+
+ $ nginx -V
+
+The output starts with::
+
+ nginx version: nginx/1.6.2
+ built by clang 6.0 (clang-600.0.51) (based on LLVM 3.5svn)
+ TLS SNI support enabled
+ configure arguments:...
+
+Among the configure arguments you'll see ``--conf-path=``. That's the
+file you will edit.
+
+Most of the content in ``nginx.conf`` is within a block labeled ``http``
+and enclosed in curly braces. Within that block there can be multiple
+blocks labeled ``server``. Add another ``server`` block like this one::
+
+ server {
+ listen 8001;
+
+ location /flags/ {
+ root /full-path-to.../countries/;
+ }
+ }
+
+After editing ``nginx.conf`` the server must be started (if it's not
+running) or told to reload the configuration file::
+
+ $ nginx # to start, if necessary
+ $ nginx -s reload # to reload the configuration
+
+To test the configuration, open the URL below in a browser. You should
+see the blue, yellow and red flag of Andorra::
+
+ http://localhost:8001/flags/ad/ad.gif
+
+If the test fails, please double check the procedure just described and
+refer to the **nginx** documentation.
+
+At this point you may run the ``flags_*2.py`` examples against the **nginx**
+install by changing the ``BASE_URL`` constant in ``flags_sequential2.py``.
+However, **nginx** is so fast that you will not see much difference in run
+time between the sequential and the threaded versions, for example. For more
+realistic testing with simulated network lag, we need **vaurien**.
+
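+As a sketch of the ``BASE_URL`` change mentioned above (the constant's current
+value is not reproduced here), pointing it at the local mirror could look like::
+
+    # hypothetical value in flags_sequential2.py for the local nginx mirror
+    BASE_URL = 'http://localhost:8001/flags'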
+
+4. Install and run **vaurien**
+------------------------------
+
+**vaurien** depends on **gevent**, which is only available for Python 2.5-2.7.
+To install **vaurien** I opened another shell, created another ``virtualenv``
+for Python 2.7, and used that environment to install and run **vaurien**::
+
+ $ virtualenv-2.7 .env27 --no-site-packages --distribute
+ New python executable in .env27/bin/python
+ Installing setuptools, pip...done.
+ $ . .env27/bin/activate
+ (.env27)$ pip install vaurien
+ Downloading/unpacking vaurien
+ Downloading vaurien-1.9.tar.gz (50kB): 50kB downloaded
+ ...many lines and a few minutes later...
+
+ Successfully installed vaurien cornice gevent statsd-client vaurienclient
+ greenlet http-parser pyramid simplejson requests zope.interface
+ translationstring PasteDeploy WebOb repoze.lru zope.deprecation venusian
+ Cleaning up...
+ (.env27)$
+
+Using that same shell, with the ``.env27`` virtualenv activated, run the
+``vaurien_delay.sh`` script in the ``countries/`` directory::
+
+    (.env27)$ ./vaurien_delay.sh
+ 2015-02-25 20:20:17 [69124] [INFO] Starting the Chaos TCP Server
+ 2015-02-25 20:20:17 [69124] [INFO] Options:
+ 2015-02-25 20:20:17 [69124] [INFO] * proxies from localhost:8002 to localhost:8001
+ 2015-02-25 20:20:17 [69124] [INFO] * timeout: 30
+ 2015-02-25 20:20:17 [69124] [INFO] * stay_connected: 0
+ 2015-02-25 20:20:17 [69124] [INFO] * pool_max_size: 100
+ 2015-02-25 20:20:17 [69124] [INFO] * pool_timeout: 30
+ 2015-02-25 20:20:17 [69124] [INFO] * async_mode: 1
+
+The ``vaurien_delay.sh`` script adds a 1 s delay to every response.
+
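+For reference, a sketch of what ``vaurien_delay.sh`` may contain, assuming the
+same options as the ``delay.sh`` script used by the Wikipedia examples, with
+the sleep raised to 1 second::
+
+    #!/bin/bash
+    vaurien --protocol http --proxy localhost:8002 --backend localhost:8001 \
+        --behavior 100:delay --behavior-delay-sleep 1
+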
+There is also the ``vaurien_error_delay.sh`` script, which produces errors in
+25% of the responses and adds a 0.5 s delay to 50% of the responses.
+
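+To run the examples through the proxy, point them at port 8002 instead of
+8001, for example by setting the ``BASE_URL`` constant to something like::
+
+    BASE_URL = 'http://localhost:8002/flags'
+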
+
+Platform-specific instructions
+==============================
+
+Nginx setup on Mac OS X
+-----------------------
+
+Install Homebrew (copy & paste the command at the bottom of http://brew.sh/),
+then use it to install **nginx**::
+
+ $ ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
+ $ brew doctor
+ $ brew install nginx
+
+Notes from the Homebrew install output (docroot, location of ``nginx.conf``,
+and how to start the server)::
+
+    Docroot is: /usr/local/var/www
+    /usr/local/etc/nginx/nginx.conf
+
+    To have launchd start nginx at login:
+        ln -sfv /usr/local/opt/nginx/*.plist ~/Library/LaunchAgents
+    Then to load nginx now:
+        launchctl load ~/Library/LaunchAgents/homebrew.mxcl.nginx.plist
+    Or, if you don't want/need launchctl, you can just run:
+        nginx
+
+
+
+Nginx setup on Lubuntu 14.04.1 LTS
+----------------------------------
+
+Docroot is: /usr/share/nginx/html
+
diff --git a/attic/futures/countries/flags_asyncio2.py b/attic/futures/countries/flags_asyncio2.py
new file mode 100644
index 0000000..2bcf0f8
--- /dev/null
+++ b/attic/futures/countries/flags_asyncio2.py
@@ -0,0 +1,61 @@
+"""Download flags of top 20 countries by population
+
+asyncio + aiohttp version
+
+Sample run::
+
+    $ python3 flags_asyncio2.py
+ NG retrieved.
+ FR retrieved.
+ IN retrieved.
+ ...
+ EG retrieved.
+ DE retrieved.
+ IR retrieved.
+ 20 flags downloaded in 1.08s
+
+"""
+
+import asyncio
+
+import aiohttp
+
+from flags import BASE_URL, save_flag, main
+
+
+@asyncio.coroutine
+def get_flag(cc):
+ url = '{}/{cc}/{cc}.gif'.format(BASE_URL, cc=cc.lower())
+ res = yield from aiohttp.request('GET', url)
+ image = yield from res.read()
+ return image
+
+
+@asyncio.coroutine
+def download_one(cc):
+ image = yield from get_flag(cc)
+ print('{} retrieved.'.format(cc))
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+@asyncio.coroutine
+def downloader_coro(cc_list):
+ to_do = [download_one(cc) for cc in cc_list]
+ results = []
+ for future in asyncio.as_completed(to_do):
+        print(future)  # as_completed does not yield the objects in to_do
+ result = yield from future
+ results.append(result)
+ return results
+
+
+def download_many(cc_list):
+ loop = asyncio.get_event_loop()
+ results = loop.run_until_complete(downloader_coro(cc_list))
+ loop.close()
+ return len(results)
+
+
+if __name__ == '__main__':
+ main(download_many)
diff --git a/attic/futures/countries/flags_processpool.py b/attic/futures/countries/flags_processpool.py
new file mode 100644
index 0000000..c24ca1d
--- /dev/null
+++ b/attic/futures/countries/flags_processpool.py
@@ -0,0 +1,42 @@
+"""Download flags of top 20 countries by population
+
+ProcessPoolExecutor version
+
+Sample run::
+
+    $ python3 flags_processpool.py
+ BD retrieved.
+ EG retrieved.
+ CN retrieved.
+ ...
+ PH retrieved.
+ US retrieved.
+ IR retrieved.
+ 20 flags downloaded in 0.93s
+
+"""
+# BEGIN FLAGS_PROCESSPOOL
+from concurrent import futures
+
+from flags import save_flag, get_flag, show, main
+
+MAX_WORKERS = 20
+
+
+def download_one(cc):
+ image = get_flag(cc)
+ show(cc)
+ save_flag(image, cc.lower() + '.gif')
+ return cc
+
+
+def download_many(cc_list):
+ with futures.ProcessPoolExecutor() as executor: # <1>
+ res = executor.map(download_one, sorted(cc_list))
+
+ return len(list(res))
+
+
+if __name__ == '__main__':
+ main(download_many)
+# END FLAGS_PROCESSPOOL
diff --git a/attic/futures/countries/notes.txt b/attic/futures/countries/notes.txt
new file mode 100644
index 0000000..aaefc0d
--- /dev/null
+++ b/attic/futures/countries/notes.txt
@@ -0,0 +1,103 @@
+Prefixes with most flags:
+
+M 18
+S 18
+B 17
+C 15
+T 13
+G 12
+A 11
+L 11
+K 10
+
+There are no flags with prefix X
+
+Errors with threadpool:
+
+$ python3 flags_threadpool2.py _
+
+ZT failed: 503 - Service Temporarily Unavailable
+ZU failed: 503 - Service Temporarily Unavailable
+ZV failed: 503 - Service Temporarily Unavailable
+ZY failed: 503 - Service Temporarily Unavailable
+--------------------
+24 flags downloaded.
+37 not found.
+615 errors.
+Elapsed time: 3.86s
+
+
+$ python3 flags_sequential2.py
+Searching for 10 flags: BD, BR, CN, ID, IN, JP, NG, PK, RU, US
+BD failed: (ProtocolError('Connection aborted.', gaierror(8, 'nodename nor servname provided, or not known')),)
+--------------------
+0 flag downloaded.
+1 error.
+Elapsed time: 0.02s
+*** WARNING: 9 downloads never started! ***
+
+
+194 flags downloaded.
+482 not found.
+Elapsed time: 683.71s
+
+real 11m23.870s
+user 0m3.214s
+sys 0m0.603s
+
+
+$ python3 flags2.py -a
+LOCAL site: http://localhost:8001/flags
+Searching for 194 flags: from AD to ZW
+1 concurrent conection will be used.
+--------------------
+194 flags downloaded.
+Elapsed time: 0.90s
+(.env34) 192:countries luciano$ python3 flags2.py -e
+LOCAL site: http://localhost:8001/flags
+Searching for 676 flags: from AA to ZZ
+1 concurrent conection will be used.
+--------------------
+194 flags downloaded.
+482 not found.
+Elapsed time: 4.71s
+(.env34) 192:countries luciano$ python3 flags2.py -s remote
+(.env34) 192:countries luciano$ python3 flags2.py -s remote -a -l 100
+REMOTE site: http://python.pro.br/fluent/data/flags
+Searching for 100 flags: from AD to LK
+1 concurrent conection will be used.
+--------------------
+100 flags downloaded.
+Elapsed time: 72.58s
+(.env34) 192:countries luciano$ python3 flags2.py -s remote -e
+REMOTE site: http://python.pro.br/fluent/data/flags
+Searching for 676 flags: from AA to ZZ
+1 concurrent conection will be used.
+--------------------
+194 flags downloaded.
+482 not found.
+Elapsed time: 436.09s
+(.env34) 192:countries luciano$ python3 flags2_threadpool.py -s remote -e
+REMOTE site: http://python.pro.br/fluent/data/flags
+Searching for 676 flags: from AA to ZZ
+30 concurrent conections will be used.
+--------------------
+194 flags downloaded.
+482 not found.
+Elapsed time: 12.32s
+(.env34) 192:countries luciano$ python3 flags2_threadpool.py -s remote -e -m 100
+REMOTE site: http://python.pro.br/fluent/data/flags
+Searching for 676 flags: from AA to ZZ
+100 concurrent conections will be used.
+--------------------
+89 flags downloaded.
+184 not found.
+403 errors.
+Elapsed time: 7.62s
+(.env34) 192:countries luciano$
+
+wait_with_progress
+http://compiletoi.net/fast-scraping-in-python-with-asyncio.html
+
+http://blog.condi.me/asynchronous-part-1/
+
diff --git a/attic/futures/demo_executor_submit.py b/attic/futures/demo_executor_submit.py
new file mode 100644
index 0000000..a899fe1
--- /dev/null
+++ b/attic/futures/demo_executor_submit.py
@@ -0,0 +1,31 @@
+"""
+Experiments with futures
+"""
+
+from time import sleep, strftime
+from concurrent import futures
+
+def display(*args):
+ print(strftime('[%H:%M:%S]'), end=' ')
+ print(*args)
+
+
+def loiter(n):
+ msg = '{}loiter({}): doing nothing for {}s...'
+ display(msg.format('\t'*n, n, n))
+ sleep(n)
+ msg = '{}loiter({}): done.'
+ display(msg.format('\t'*n, n))
+ return n * 10
+
+
+def demo_submit():
+    executor = futures.ThreadPoolExecutor(3)  # pool with only 3 worker threads
+ future_list = [executor.submit(loiter, n) for n in range(5)]
+    display('done?', [future.done() for future in future_list])  # done() does not block
+ display('Waiting for results...')
+ for i, result in enumerate(future.result() for future in future_list):
+ display('result[{}]: {}'.format(i, result))
+
+
+demo_submit()
diff --git a/attic/futures/future_yield.py b/attic/futures/future_yield.py
new file mode 100644
index 0000000..d531262
--- /dev/null
+++ b/attic/futures/future_yield.py
@@ -0,0 +1,41 @@
+"""
+An experiment showing that ``asyncio.Future`` is an iterable (it
+implements `__iter__`) designed to be used with ``yield from``.
+
+Priming the future returns itself. After the result of the future
+is set, next iteration produces the result as the ``value`` attribute
+of ``StopIteration``.
+
+Sample run::
+
+ $ python3 future_yield.py
+ a, future: 0x66514c
+ b, prime_res: 0x66514c
+ b, exc.value: 42
+
+"""
+
+import asyncio
+
+@asyncio.coroutine
+def a(future):
+ print('a, future:\t', future, hex(id(future)))
+ res = yield from future
+ return res
+
+def b():
+ future = asyncio.Future()
+ coro = a(future)
+ prime_res = next(coro)
+ print('b, prime_res:\t', prime_res, hex(id(future)))
+ # If next(coro) is called again before the result of
+ # the future is set, we get:
+ # AssertionError: yield from wasn't used with future
+ #result = next(coro) # uncomment to see AssertionError
+ future.set_result(42)
+ try:
+ next(coro)
+ except StopIteration as exc:
+ print('b, exc.value:\t', exc.value)
+
+b()
diff --git a/attic/futures/future_yield2.py b/attic/futures/future_yield2.py
new file mode 100644
index 0000000..336489e
--- /dev/null
+++ b/attic/futures/future_yield2.py
@@ -0,0 +1,26 @@
+import asyncio
+
+@asyncio.coroutine
+def a(future):
+    print('a, future:', future, hex(id(future)))
+    res = yield from future
+    return res
+
+def b():
+    # b is a plain function: it primes the coroutine by hand with next()
+    future = asyncio.Future()
+    coro = a(future)
+    prime_result = next(coro)
+    print('b, prime_result:', prime_result, hex(id(future)))
+
+b()
+
+loop = asyncio.get_event_loop()
+future = asyncio.Future()
+print('future:', future, hex(id(future)))
+tasks = [asyncio.async(a(future))]  # schedule a(future) as a Task
+future.set_result(42)  # the task can only finish once the future has a result
+
+res = loop.run_until_complete(asyncio.wait(tasks))
+loop.close()
+
diff --git a/interfaces/dict_subclass.py b/attic/interfaces/dict_subclass.py
similarity index 100%
rename from interfaces/dict_subclass.py
rename to attic/interfaces/dict_subclass.py
diff --git a/interfaces/exceptions-tree.txt b/attic/interfaces/exceptions-tree.txt
similarity index 100%
rename from interfaces/exceptions-tree.txt
rename to attic/interfaces/exceptions-tree.txt
diff --git a/interfaces/pypy_difference.rst b/attic/interfaces/pypy_difference.rst
similarity index 100%
rename from interfaces/pypy_difference.rst
rename to attic/interfaces/pypy_difference.rst
diff --git a/interfaces/subclassing_builtins.rst b/attic/interfaces/subclassing_builtins.rst
similarity index 100%
rename from interfaces/subclassing_builtins.rst
rename to attic/interfaces/subclassing_builtins.rst
diff --git a/iterables/CACM/citation.txt b/attic/iterables/CACM/citation.txt
similarity index 100%
rename from iterables/CACM/citation.txt
rename to attic/iterables/CACM/citation.txt
diff --git a/iterables/CACM/closed_file.py b/attic/iterables/CACM/closed_file.py
similarity index 100%
rename from iterables/CACM/closed_file.py
rename to attic/iterables/CACM/closed_file.py
diff --git a/iterables/CACM/haha.py b/attic/iterables/CACM/haha.py
similarity index 100%
rename from iterables/CACM/haha.py
rename to attic/iterables/CACM/haha.py
diff --git a/iterables/CACM/less_more.py b/attic/iterables/CACM/less_more.py
similarity index 100%
rename from iterables/CACM/less_more.py
rename to attic/iterables/CACM/less_more.py
diff --git a/iterables/CACM/zero_div.py b/attic/iterables/CACM/zero_div.py
similarity index 100%
rename from iterables/CACM/zero_div.py
rename to attic/iterables/CACM/zero_div.py
diff --git a/iterables/almost_aritprog_v0.py b/attic/iterables/almost_aritprog_v0.py
similarity index 100%
rename from iterables/almost_aritprog_v0.py
rename to attic/iterables/almost_aritprog_v0.py
diff --git a/iterables/almost_aritprog_v6.py b/attic/iterables/almost_aritprog_v6.py
similarity index 100%
rename from iterables/almost_aritprog_v6.py
rename to attic/iterables/almost_aritprog_v6.py
diff --git a/iterables/aritprog_v4.py b/attic/iterables/aritprog_v4.py
similarity index 100%
rename from iterables/aritprog_v4.py
rename to attic/iterables/aritprog_v4.py
diff --git a/iterables/aritprog_v5.py b/attic/iterables/aritprog_v5.py
similarity index 100%
rename from iterables/aritprog_v5.py
rename to attic/iterables/aritprog_v5.py
diff --git a/iterables/paragraph.py b/attic/iterables/paragraph.py
similarity index 100%
rename from iterables/paragraph.py
rename to attic/iterables/paragraph.py
diff --git a/iterables/simplest_generators.doctest b/attic/iterables/simplest_generators.doctest
similarity index 100%
rename from iterables/simplest_generators.doctest
rename to attic/iterables/simplest_generators.doctest
diff --git a/iterables/vector.py b/attic/iterables/vector.py
similarity index 100%
rename from iterables/vector.py
rename to attic/iterables/vector.py
diff --git a/iterables/vector_flex_init.py b/attic/iterables/vector_flex_init.py
similarity index 100%
rename from iterables/vector_flex_init.py
rename to attic/iterables/vector_flex_init.py
diff --git a/attic/metaprog/plainpoint.py b/attic/metaprog/plainpoint.py
new file mode 100644
index 0000000..be2f037
--- /dev/null
+++ b/attic/metaprog/plainpoint.py
@@ -0,0 +1,32 @@
+"""
+A class equivalent to the class statement below would be generated by this code:
+
+ >>> import collections
+ >>> Point = collections.plainclass('Point', 'x y')
+"""
+
+class Point(object):
+ __slots__ = ['x', 'y'] # save memory in the likely event there are many instances
+
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+
+ def __repr__(self):
+ return 'Point({!r}, {!r})'.format(self.x, self.y)
+
+ def __eq__(self, other):
+ if not isinstance(other, Point):
+ return NotImplemented
+ return self.x == other.x and self.y == other.y
+
+    def __iter__(self):  # support unpacking: x, y = point
+ yield self.x
+ yield self.y
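+
+
+# For comparison: the closest existing factory in the standard library is
+# collections.namedtuple, though it builds an immutable tuple subclass rather
+# than a mutable __slots__-based class like the one above:
+#
+#     Point = collections.namedtuple('Point', 'x y')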
diff --git a/metaprog/prop_inheritance.py b/attic/metaprog/prop_inheritance.py
similarity index 100%
rename from metaprog/prop_inheritance.py
rename to attic/metaprog/prop_inheritance.py
diff --git a/metaprog/special_attrs.py b/attic/metaprog/special_attrs.py
similarity index 100%
rename from metaprog/special_attrs.py
rename to attic/metaprog/special_attrs.py
diff --git a/metaprog/spreadsheet.py b/attic/metaprog/spreadsheet.py
similarity index 100%
rename from metaprog/spreadsheet.py
rename to attic/metaprog/spreadsheet.py
diff --git a/metaprog/spreadsheet2.py b/attic/metaprog/spreadsheet2.py
similarity index 100%
rename from metaprog/spreadsheet2.py
rename to attic/metaprog/spreadsheet2.py
diff --git a/objects/attr_list.py b/attic/objects/attr_list.py
similarity index 100%
rename from objects/attr_list.py
rename to attic/objects/attr_list.py
diff --git a/objects/attrs_not_in_object.py b/attic/objects/attrs_not_in_object.py
similarity index 100%
rename from objects/attrs_not_in_object.py
rename to attic/objects/attrs_not_in_object.py
diff --git a/objects/cards.py b/attic/objects/cards.py
similarity index 100%
rename from objects/cards.py
rename to attic/objects/cards.py
diff --git a/objects/cards_format.py b/attic/objects/cards_format.py
similarity index 100%
rename from objects/cards_format.py
rename to attic/objects/cards_format.py
diff --git a/objects/common_attrs.txt b/attic/objects/common_attrs.txt
similarity index 100%
rename from objects/common_attrs.txt
rename to attic/objects/common_attrs.txt
diff --git a/objects/not_so_common_attrs.txt b/attic/objects/not_so_common_attrs.txt
similarity index 100%
rename from objects/not_so_common_attrs.txt
rename to attic/objects/not_so_common_attrs.txt
diff --git a/operator/Interest.java b/attic/operator/Interest.java
similarity index 100%
rename from operator/Interest.java
rename to attic/operator/Interest.java
diff --git a/operator/dispatch.py b/attic/operator/dispatch.py
similarity index 100%
rename from operator/dispatch.py
rename to attic/operator/dispatch.py
diff --git a/operator/factorial/CorrectFactorial.java b/attic/operator/factorial/CorrectFactorial.java
similarity index 100%
rename from operator/factorial/CorrectFactorial.java
rename to attic/operator/factorial/CorrectFactorial.java
diff --git a/operator/factorial/SimpleFactorial.java b/attic/operator/factorial/SimpleFactorial.java
similarity index 100%
rename from operator/factorial/SimpleFactorial.java
rename to attic/operator/factorial/SimpleFactorial.java
diff --git a/operator/factorial/factorial.py b/attic/operator/factorial/factorial.py
similarity index 100%
rename from operator/factorial/factorial.py
rename to attic/operator/factorial/factorial.py
diff --git a/operator/interest.py b/attic/operator/interest.py
similarity index 100%
rename from operator/interest.py
rename to attic/operator/interest.py
diff --git a/operator/vector.py b/attic/operator/vector.py
similarity index 100%
rename from operator/vector.py
rename to attic/operator/vector.py
diff --git a/sequences/bisect_demo_pos.py b/attic/sequences/bisect_demo_pos.py
similarity index 100%
rename from sequences/bisect_demo_pos.py
rename to attic/sequences/bisect_demo_pos.py
diff --git a/sequences/bisect_find.py b/attic/sequences/bisect_find.py
similarity index 100%
rename from sequences/bisect_find.py
rename to attic/sequences/bisect_find.py
diff --git a/sequences/bisect_in.py b/attic/sequences/bisect_in.py
similarity index 100%
rename from sequences/bisect_in.py
rename to attic/sequences/bisect_in.py
diff --git a/sequences/bisect_time.py b/attic/sequences/bisect_time.py
similarity index 100%
rename from sequences/bisect_time.py
rename to attic/sequences/bisect_time.py
diff --git a/sequences/dis_iadd_to_item.ods b/attic/sequences/dis_iadd_to_item.ods
similarity index 100%
rename from sequences/dis_iadd_to_item.ods
rename to attic/sequences/dis_iadd_to_item.ods
diff --git a/sequences/dis_iadd_to_item.txt b/attic/sequences/dis_iadd_to_item.txt
similarity index 100%
rename from sequences/dis_iadd_to_item.txt
rename to attic/sequences/dis_iadd_to_item.txt
diff --git a/sequences/frenchdeck2.doctest b/attic/sequences/frenchdeck2.doctest
similarity index 100%
rename from sequences/frenchdeck2.doctest
rename to attic/sequences/frenchdeck2.doctest
diff --git a/sequences/metro_areas.py b/attic/sequences/metro_areas.py
similarity index 100%
rename from sequences/metro_areas.py
rename to attic/sequences/metro_areas.py
diff --git a/sequences/metro_areas.txt b/attic/sequences/metro_areas.txt
similarity index 100%
rename from sequences/metro_areas.txt
rename to attic/sequences/metro_areas.txt
diff --git a/sequences/named_slices.py b/attic/sequences/named_slices.py
similarity index 100%
rename from sequences/named_slices.py
rename to attic/sequences/named_slices.py
diff --git a/sequences/sentence.doctest b/attic/sequences/sentence.doctest
similarity index 100%
rename from sequences/sentence.doctest
rename to attic/sequences/sentence.doctest
diff --git a/sequences/sentence_slice.doctest b/attic/sequences/sentence_slice.doctest
similarity index 100%
rename from sequences/sentence_slice.doctest
rename to attic/sequences/sentence_slice.doctest
diff --git a/sequences/sentence_slice.py b/attic/sequences/sentence_slice.py
similarity index 96%
rename from sequences/sentence_slice.py
rename to attic/sequences/sentence_slice.py
index d275989..918338d 100644
--- a/sequences/sentence_slice.py
+++ b/attic/sequences/sentence_slice.py
@@ -6,9 +6,9 @@
import reprlib
-RE_TOKEN = re.compile('\w+|\s+|[^\w\s]+')
+RE_TOKEN = re.compile(r'\w+|\s+|[^\w\s]+')
RE_WORD = re.compile('\w+')
-RE_PUNCTUATION = re.compile('[^\w\s]+')
+RE_PUNCTUATION = re.compile(r'[^\w\s]+')
class SentenceSlice:
diff --git a/sequences/slice_dump.py b/attic/sequences/slice_dump.py
similarity index 100%
rename from sequences/slice_dump.py
rename to attic/sequences/slice_dump.py
diff --git a/sequences/slice_test.py b/attic/sequences/slice_test.py
similarity index 100%
rename from sequences/slice_test.py
rename to attic/sequences/slice_test.py
diff --git a/sequences/slice_viewer.py b/attic/sequences/slice_viewer.py
similarity index 100%
rename from sequences/slice_viewer.py
rename to attic/sequences/slice_viewer.py
diff --git a/sequences/str_concat.py b/attic/sequences/str_concat.py
similarity index 100%
rename from sequences/str_concat.py
rename to attic/sequences/str_concat.py
diff --git a/sequences/table.py b/attic/sequences/table.py
similarity index 100%
rename from sequences/table.py
rename to attic/sequences/table.py
diff --git a/sequences/tuples.doctest b/attic/sequences/tuples.doctest
similarity index 100%
rename from sequences/tuples.doctest
rename to attic/sequences/tuples.doctest
diff --git a/sequences/war-and-peace.txt b/attic/sequences/war-and-peace.txt
similarity index 100%
rename from sequences/war-and-peace.txt
rename to attic/sequences/war-and-peace.txt
diff --git a/strings-bytes/cafe-gr.txt b/attic/strings-bytes/cafe-gr.txt
similarity index 100%
rename from strings-bytes/cafe-gr.txt
rename to attic/strings-bytes/cafe-gr.txt
diff --git a/strings-bytes/cafe.txt b/attic/strings-bytes/cafe.txt
similarity index 100%
rename from strings-bytes/cafe.txt
rename to attic/strings-bytes/cafe.txt
diff --git a/strings-bytes/casefold_demo.py b/attic/strings-bytes/casefold_demo.py
similarity index 100%
rename from strings-bytes/casefold_demo.py
rename to attic/strings-bytes/casefold_demo.py
diff --git a/strings-bytes/category_demo.py b/attic/strings-bytes/category_demo.py
similarity index 100%
rename from strings-bytes/category_demo.py
rename to attic/strings-bytes/category_demo.py
diff --git a/strings-bytes/charfinder.py b/attic/strings-bytes/charfinder.py
similarity index 100%
rename from strings-bytes/charfinder.py
rename to attic/strings-bytes/charfinder.py
diff --git a/strings-bytes/currency_demo.py b/attic/strings-bytes/currency_demo.py
similarity index 100%
rename from strings-bytes/currency_demo.py
rename to attic/strings-bytes/currency_demo.py
diff --git a/strings-bytes/encodings_demo.py b/attic/strings-bytes/encodings_demo.py
similarity index 100%
rename from strings-bytes/encodings_demo.py
rename to attic/strings-bytes/encodings_demo.py
diff --git a/strings-bytes/identifier_norm.py b/attic/strings-bytes/identifier_norm.py
similarity index 100%
rename from strings-bytes/identifier_norm.py
rename to attic/strings-bytes/identifier_norm.py
diff --git a/strings-bytes/identifier_norm_writer.py b/attic/strings-bytes/identifier_norm_writer.py
similarity index 100%
rename from strings-bytes/identifier_norm_writer.py
rename to attic/strings-bytes/identifier_norm_writer.py
diff --git a/strings-bytes/nfc_demo.py b/attic/strings-bytes/nfc_demo.py
similarity index 100%
rename from strings-bytes/nfc_demo.py
rename to attic/strings-bytes/nfc_demo.py
diff --git a/strings-bytes/nfk_demo.py b/attic/strings-bytes/nfk_demo.py
similarity index 100%
rename from strings-bytes/nfk_demo.py
rename to attic/strings-bytes/nfk_demo.py
diff --git a/strings-bytes/numerics.py b/attic/strings-bytes/numerics.py
similarity index 100%
rename from strings-bytes/numerics.py
rename to attic/strings-bytes/numerics.py
diff --git a/strings-bytes/numerics_demo.txt b/attic/strings-bytes/numerics_demo.txt
similarity index 100%
rename from strings-bytes/numerics_demo.txt
rename to attic/strings-bytes/numerics_demo.txt
diff --git a/strings-bytes/plane_count.py b/attic/strings-bytes/plane_count.py
similarity index 100%
rename from strings-bytes/plane_count.py
rename to attic/strings-bytes/plane_count.py
diff --git a/strings-bytes/sorting.py b/attic/strings-bytes/sorting.py
similarity index 100%
rename from strings-bytes/sorting.py
rename to attic/strings-bytes/sorting.py
diff --git a/strings-bytes/sorting_uca.py b/attic/strings-bytes/sorting_uca.py
similarity index 100%
rename from strings-bytes/sorting_uca.py
rename to attic/strings-bytes/sorting_uca.py
diff --git a/strings-bytes/sorting_uca.txt b/attic/strings-bytes/sorting_uca.txt
similarity index 100%
rename from strings-bytes/sorting_uca.txt
rename to attic/strings-bytes/sorting_uca.txt
diff --git a/strings-bytes/str_repr.py b/attic/strings-bytes/str_repr.py
similarity index 100%
rename from strings-bytes/str_repr.py
rename to attic/strings-bytes/str_repr.py
diff --git a/strings-bytes/str_repr2.py b/attic/strings-bytes/str_repr2.py
similarity index 100%
rename from strings-bytes/str_repr2.py
rename to attic/strings-bytes/str_repr2.py
diff --git a/strings-bytes/strings-bytes-test.txt b/attic/strings-bytes/strings-bytes-test.txt
similarity index 100%
rename from strings-bytes/strings-bytes-test.txt
rename to attic/strings-bytes/strings-bytes-test.txt
diff --git a/classes/bingoaddable.py b/classes/bingoaddable.py
deleted file mode 100644
index e5b53a5..0000000
--- a/classes/bingoaddable.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""
-======================
-AddableBingoCage tests
-======================
-
-
-Tests for __add__ and __iadd__:
-
- >>> vowels = 'AEIOU'
- >>> globe = AddableBingoCage(vowels)
- >>> len(globe)
- 5
- >>> globe.pop() in vowels
- True
- >>> len(globe)
- 4
- >>> globe2 = AddableBingoCage('XYZ')
- >>> globe3 = globe + globe2
- >>> len(globe3)
- 7
- >>> void = globe + [10, 20]
- Traceback (most recent call last):
- ...
- TypeError: unsupported operand type(s) for +: 'AddableBingoCage' and 'list'
-
-
-Tests for __add__ and __iadd__:
-
- >>> globe_orig = globe
- >>> len(globe)
- 4
- >>> globe += globe2
- >>> len(globe)
- 7
- >>> globe += [10, 20]
- >>> len(globe)
- 9
- >>> globe is globe_orig
- True
-
-"""
-
-# BEGIN ADDABLE_BINGO
-import itertools # <1>
-from bingobase import BingoCage
-
-
-class AddableBingoCage(BingoCage): # <2>
-
- def __add__(self, other):
- if isinstance(other, AddableBingoCage): # <3>
- return AddableBingoCage(itertools.chain(self, other)) # <4>
- else:
- return NotImplemented
-
- def __iadd__(self, other):
- self.load(other) # <5>
- return self # <6>
-# END ADDABLE_BINGO
diff --git a/classes/bingobase.py b/classes/bingobase.py
deleted file mode 100644
index 6e1528e..0000000
--- a/classes/bingobase.py
+++ /dev/null
@@ -1,96 +0,0 @@
-"""
-===============
-BingoCage tests
-===============
-
-
-Create and load instance from iterable::
-
- >>> balls = list(range(3))
- >>> globe = BingoCage(balls)
- >>> len(globe)
- 3
-
-
-Pop and collect balls::
-
- >>> picks = []
- >>> picks.append(globe.pop())
- >>> picks.append(globe.pop())
- >>> picks.append(globe.pop())
-
-
-Check state and results::
-
- >>> len(globe)
- 0
- >>> sorted(picks) == balls
- True
-
-
-Reload::
-
- >>> globe.load(balls)
- >>> len(globe)
- 3
- >>> picks = [globe.pop() for i in balls]
- >>> len(globe)
- 0
-
-
-Load and pop 20 balls to verify that the order has changed::
-
- >>> balls = list(range(20))
- >>> globe = BingoCage(balls)
- >>> picks = []
- >>> while globe:
- ... picks.append(globe.pop())
- >>> len(picks) == len(balls)
- True
- >>> picks != balls
- True
-
-
-Also check that the order is not simply reversed either::
-
- >>> picks[::-1] != balls
- True
-
-Note: last 2 tests above each have 1 chance in 20! (factorial) of
-failing even if the implementation is OK. 1/20!, or approximately
-4.11e-19, is the probability of the 20 balls coming out, by chance,
-in the exact order the were loaded.
-
-Check that `LookupError` (or a subclass) is the exception thrown
-when the device is empty::
-
- >>> globe = BingoCage([])
- >>> try:
- ... globe.pop()
- ... except LookupError as exc:
- ... print('OK')
- OK
-
-"""
-
-import random
-
-
-class BingoCage():
-
- def __init__(self, iterable):
- self._balls = []
- self.load(iterable)
-
- def load(self, iterable):
- self._balls.extend(iterable)
- random.shuffle(self._balls)
-
- def __len__(self):
- return len(self._balls)
-
- def pop(self):
- return self._balls.pop()
-
- def __iter__(self):
- return reversed(self._balls)
diff --git a/classes/private/pt-br/AcessaPrivado.java b/classes/private/pt-br/AcessaPrivado.java
deleted file mode 100644
index b332070..0000000
--- a/classes/private/pt-br/AcessaPrivado.java
+++ /dev/null
@@ -1,25 +0,0 @@
-import java.lang.reflect.Field;
-
-public class AcessaPrivado {
-
- public static void main(String[] args) {
- ObjetoSecreto oSecreto = new ObjetoSecreto("senha super secreta");
- Field campoPrivado = null;
- try {
- campoPrivado = ObjetoSecreto.class.getDeclaredField("escondido");
- }
- catch (NoSuchFieldException e) {
- System.err.println(e);
- System.exit(1);
- }
- campoPrivado.setAccessible(true); // arrombamos a porta
- try {
- String tavaEscondido = (String) campoPrivado.get(oSecreto);
- System.out.println("oSecreto.escondido = " + tavaEscondido);
- }
- catch (IllegalAccessException e) {
- // esta exceção nao acontece porque fizemos setAcessible(true)
- System.err.println(e);
- }
- }
-}
diff --git a/classes/private/pt-br/ObjetoSecreto.java b/classes/private/pt-br/ObjetoSecreto.java
deleted file mode 100644
index 378bf4e..0000000
--- a/classes/private/pt-br/ObjetoSecreto.java
+++ /dev/null
@@ -1,9 +0,0 @@
-public class ObjetoSecreto {
-
- private String escondido = "";
- private String oculto = "dado ultra secreto";
-
- public ObjetoSecreto(String texto) {
- this.escondido = texto;
- }
-}
diff --git a/classes/private/pt-br/acessapriv.py b/classes/private/pt-br/acessapriv.py
deleted file mode 100644
index 4e88556..0000000
--- a/classes/private/pt-br/acessapriv.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import ObjetoSecreto
-
-oSecreto = ObjetoSecreto('senha super secreta')
-campoPrivado = ObjetoSecreto.getDeclaredField('escondido')
-campoPrivado.setAccessible(True) # arrombamos a porta
-print 'oSecreto.escondido =', campoPrivado.get(oSecreto)
diff --git a/classes/private/pt-br/listapriv.py b/classes/private/pt-br/listapriv.py
deleted file mode 100644
index 01c7cde..0000000
--- a/classes/private/pt-br/listapriv.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from java.lang.reflect import Modifier
-import ObjetoSecreto
-
-oSecreto = ObjetoSecreto('senha super secreta')
-campos = ObjetoSecreto.getDeclaredFields()
-for campo in campos:
- # so campos privados!
- if Modifier.isPrivate(campo.getModifiers()):
- print campo
- campo.setAccessible(True) # arrombamos a porta
- print '\t', campo.getName(), '=', campo.get(oSecreto)
diff --git a/control/referencias.txt b/control/referencias.txt
deleted file mode 100644
index 26fa6c3..0000000
--- a/control/referencias.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-What's New in Python 2.5 - PEP 342: New Generator Features
-http://docs.python.org/release/2.5/whatsnew/pep-342.html
-
-PEP 342 -- Coroutines via Enhanced Generators
-http://www.python.org/dev/peps/pep-0342/
-
-PEP 380 -- Syntax for Delegating to a Subgenerator
-http://www.python.org/dev/peps/pep-0380/
-
-Coroutines For the Working Python Developer
-http://sdiehl.github.io/coroutine-tutorial/
-
diff --git a/datamodel/tox.ini b/datamodel/tox.ini
deleted file mode 100644
index 9503f40..0000000
--- a/datamodel/tox.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[pep8]
-ignore = E127,E302
-# E127: continuation line over-indented for visual indent
-# E302: expected 2 blank lines, found 1
diff --git a/descriptors/bulkfood_v3.py-broken b/descriptors/bulkfood_v3.py-broken
deleted file mode 100644
index e1b231c..0000000
--- a/descriptors/bulkfood_v3.py-broken
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
-WARNING: Broken implementation for demonstration purposes.
-
-A line item for a bulk food order has description, weight and price fields::
-
- >>> raisins = LineItem('Golden raisins', 10, 6.95)
- >>> raisins.weight, raisins.description, raisins.price
- (10, 'Golden raisins', 6.95)
-
-A ``subtotal`` method gives the total price for that line item::
-
- >>> raisins.subtotal()
- 69.5
-
-The weight of a ``LineItem`` must be greater than 0::
-
- >>> raisins.weight = -20
- Traceback (most recent call last):
- ...
- ValueError: value must be > 0
-
-Negative or 0 price is not acceptable either::
-
- >>> truffle = LineItem('White truffle', 100, 0)
- Traceback (most recent call last):
- ...
- ValueError: value must be > 0
-
-
-No change was made::
-
- >>> raisins.weight
- 10
-
-"""
-
-
-class Quantity:
-
- def __init__(self, storage_name):
- self.storage_name = storage_name
-
- def __set__(self, instance, value):
- if value > 0:
- instance.__dict__[self.storage_name] = value
- else:
- raise ValueError('value must be > 0')
-
-
-class LineItem:
- weight = Quantity('weight')
- price = Quantity('weight') # <-- this is the bug discussed in the book
-
- def __init__(self, description, weight, price):
- self.description = description
- self.weight = weight
- self.price = price
-
- def subtotal(self):
- return self.weight * self.price
diff --git a/interfaces/bingo.py b/interfaces/bingo.py
deleted file mode 100644
index 79d9427..0000000
--- a/interfaces/bingo.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import random
-
-from tombola import Tombola
-
-
-class BingoCage(Tombola): # <1>
-
- def __init__(self, items):
- self._balls = list(items) # <2>
-
- def load(self, items):
- self._balls.extend(items)
-
- def pick(self):
- try:
- position = random.randrange(len(self._balls)) # <3>
- except ValueError:
- raise LookupError('pop from empty BingoCage')
- return self._balls.pop(position) # <4>
diff --git a/interfaces/lotto.py b/interfaces/lotto.py
deleted file mode 100644
index d7fca5b..0000000
--- a/interfaces/lotto.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import random
-
-from tombola import Tombola
-
-
-class LotteryBlower(Tombola):
-
- def __init__(self, iterable):
- self.randomizer = random.SystemRandom() # <1>
- self.clear()
- self.load(iterable)
-
- def clear(self):
- self._balls = []
-
- def load(self, iterable):
- self._balls.extend(iterable)
- self.randomizer.shuffle(self._balls) # <2>
-
- def pick(self):
- return self._balls.pop() # <3>
-
- def loaded(self): # <4>
- return len(self._balls) > 0
diff --git a/interfaces/tombola.py b/interfaces/tombola.py
deleted file mode 100644
index 7b25ee3..0000000
--- a/interfaces/tombola.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import abc
-
-class Tombola(abc.ABC): # <1>
-
- @abc.abstractmethod
- def load(self, iterable): # <2>
- """Add items from an iterable."""
-
- @abc.abstractmethod
- def pick(self): # <3>
- """Remove item at random, returning it.
-
- This method should raise `LookupError` when the instance is empty.
- """
-
- def loaded(self): # <4>
- try:
- item = self.pick()
- except LookupError:
- return False
- else:
- self.load([item]) # put it back
- return True
diff --git a/localfiles.txt b/localfiles.txt
index 369e959..7c1a346 100644
--- a/localfiles.txt
+++ b/localfiles.txt
@@ -6,5 +6,5 @@ __pycache__
.gitignore
.DS_Store
*.arr
-README.rst
+/README.rst
LICENSE