Added example solutions to several chapters. Feel free to create a pull request with your answers. Also for the chapters that have no solutions yet :)

This commit is contained in:
Rick van Hattem 2022-09-05 00:04:00 +02:00
parent 82cf71ed1c
commit 500a31afac
No known key found for this signature in database
GPG Key ID: E81444E9CE1F695D
34 changed files with 742 additions and 0 deletions

View File

@ -0,0 +1,29 @@
# Implement the quicksort algorithm.
import random
# one-liner approach
# Recursive quicksort as a single lambda expression: partition the tail
# around the head element, sort each side, and glue the pieces together.
qs = lambda xs: xs if len(xs) <= 1 else (
    qs([item for item in xs[1:] if item < xs[0]])
    + [xs[0]]
    + qs([item for item in xs[1:] if item >= xs[0]])
)
# more verbose approach
def quicksort(xs):
    """Return a sorted copy of *xs* using a first-element-pivot quicksort.

    Lists of length 0 or 1 are returned as-is; everything else is
    partitioned around the first element and sorted recursively.
    """
    if len(xs) <= 1:
        return xs
    pivot, rest = xs[0], xs[1:]
    smaller = quicksort([item for item in rest if item < pivot])
    larger = quicksort([item for item in rest if item >= pivot])
    return smaller + [pivot] + larger
def main():
    """Smoke-test both quicksort implementations against sorted()."""
    sample = random.sample(range(1000), 100)
    expected = sorted(sample)
    assert quicksort(sample) == expected
    assert qs(sample) == expected


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,33 @@
# Write a groupby function that isn't affected by sorting.
import collections
def groupby(func, seq):
    """Group the items of *seq* by ``func(item)``.

    Returns a ``defaultdict(list)`` mapping each key to the items that
    produced it, preserving the input order within each group. Unlike
    ``itertools.groupby``, *seq* does not need to be pre-sorted.
    """
    grouped = collections.defaultdict(list)
    for element in seq:
        grouped[func(element)].append(element)
    return grouped
def main():
    # Explicitly defined test data for clarity.
    numbers = [0, 1, 2, 3, 4, 5, 6, 7]
    cases = [
        (lambda x: x % 2, {0: [0, 2, 4, 6], 1: [1, 3, 5, 7]}),
        (
            lambda x: 'even' if x % 2 == 0 else 'odd',
            {'even': [0, 2, 4, 6], 'odd': [1, 3, 5, 7]},
        ),
        (lambda x: x > 5, {False: [0, 1, 2, 3, 4, 5], True: [6, 7]}),
    ]
    for key_function, expected in cases:
        assert groupby(key_function, numbers) == expected


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,29 @@
# Write a groupby function that returns lists of results instead of
# generators.
import pprint
def groupby(iterable, key=None):
    '''
    Return a dictionary of lists of items grouped by the key function.

    Note that as opposed to the itertools.groupby function, this function
    does not require the iterable to be sorted. When `key` is omitted the
    items themselves are used as group keys.
    '''
    groups = {}
    for item in iterable:
        group = item if key is None else key(item)
        groups.setdefault(group, []).append(item)
    return groups
def main():
    # Demo data from the itertools docs
    for demo_data in ('AAAABBBCCDAABBB', 'AAAABBBCCD'):
        pprint.pprint(groupby(demo_data))


if __name__ == '__main__':
    main()

View File

View File

@ -0,0 +1,43 @@
# Extend the `track` function to monitor execution time.
import functools
import time
from datetime import datetime
def track(function=None, label=None):
    """Decorator that logs every call together with its execution time.

    Usable both as ``@track`` and as ``@track(label=...)``: when only a
    label is supplied, a partial is returned that waits for the function.
    """
    # Trick to add an optional argument to our decorator: the first call
    # only captures the label, the second supplies the function.
    if label and not function:
        return functools.partial(track, label=label)

    print(f'initializing {label}')

    @functools.wraps(function)
    def _track(*args, **kwargs):
        print(f'calling {label}')
        started = datetime.now()
        outcome = function(*args, **kwargs)
        finished = datetime.now()
        print(f'called {label} in {finished - started}')
        return outcome

    return _track
# Decorators can be stacked: the outer `track` times the inner wrapper,
# so its reported duration includes the inner wrapper's printing.
@track(label='outer')
@track(label='inner')
def func():
    print('func')


@track(label='Slow function')
def slower_func():
    print('slower_func')
    # Sleep so the reported duration is visibly non-zero.
    time.sleep(0.5)


if __name__ == '__main__':
    func()
    slower_func()

View File

View File

@ -0,0 +1,61 @@
# Extend the `track` function with min/max/average execution time and call
# count.
import functools
import random
import time
from datetime import datetime, timedelta
def track(function=None, label=None):
    """Decorator that logs calls and records min/max/total/count timing.

    Usable both as ``@track`` and as ``@track(label=...)``. The decorated
    function gains a ``print_stats()`` attribute that reports the
    collected execution-time statistics.
    """
    # Trick to add an optional argument to our decorator
    if label and not function:
        return functools.partial(track, label=label)

    # Statistics shared by every call to this decorated function.
    execution_times = dict(
        min=timedelta.max,
        max=timedelta.min,
        total=timedelta(),
        count=0,
    )
    print(f'initializing {label}')

    @functools.wraps(function)
    def _track(*args, **kwargs):
        print(f'calling {label}')
        start = datetime.now()
        result = function(*args, **kwargs)
        end = datetime.now()
        duration = end - start
        execution_times['min'] = min(execution_times['min'], duration)
        execution_times['max'] = max(execution_times['max'], duration)
        execution_times['total'] += duration
        execution_times['count'] += 1
        print(f'called {label} in {duration}')
        return result

    def print_stats():
        print(f'{label} stats:')
        print(f' min: {execution_times["min"]}')
        print(f' max: {execution_times["max"]}')
        print(f' total: {execution_times["total"]}')
        # Guard against ZeroDivisionError when the function was never called.
        if execution_times['count']:
            print(f' avg: {execution_times["total"] / execution_times["count"]}')
        else:
            print(' avg: n/a (never called)')

    _track.print_stats = print_stats
    return _track
@track(label='random sleep')
def random_sleep():
    # Sleep between 0 and 1 second so every call has a different duration,
    # giving the min/max/avg statistics something to spread over.
    time.sleep(random.random())


if __name__ == '__main__':
    for i in range(10):
        random_sleep()
    # Print the min/max/total/avg statistics collected by `track`.
    random_sleep.print_stats()

View File

View File

@ -0,0 +1,46 @@
# Modify the memoization function to work with unhashable types.
import functools
# Module-level cache shared by every memoized function; entries are keyed
# by (function, hashable-key-of-args) so functions never collide.
cache = dict()


def memoize(function):
    """Memoization decorator that also supports unhashable arguments.

    Unhashable arguments (lists, dicts, ...) are keyed by their ``repr()``
    instead of their hash, so calls like ``printer(dict(a=1))`` can be
    cached too — the point of this exercise.
    """
    def safe_hash(args):
        '''
        In the case of unhashable types use the `repr()` to be hashable.
        '''
        try:
            return hash(args)
        except TypeError:
            return repr(args)

    @functools.wraps(function)
    def _memoize(*args):
        # If the cache is not available, call the function.
        # Use safe_hash() so unhashable arguments do not raise a TypeError
        # when building the cache key (the original keyed on `args`
        # directly, which defeated the purpose of safe_hash).
        key = function, safe_hash(args)
        if key not in cache:
            cache[key] = function(*args)
        return cache[key]

    return _memoize
@memoize
def printer(*args):
    # Memoized: repeated identical calls only print (and run) once.
    print(args)


def main():
    # Should work as expected
    printer('a', 'b', 'c')
    # Would have issues with the original memoize function because the
    # parameters are unhashable
    printer(dict(a=1, b=2, c=3))


if __name__ == '__main__':
    main()

View File

View File

@ -0,0 +1,49 @@
# Modify the memoization function to have a cache per function instead of a
# global one.
import functools
def memoize(function):
    """Memoizing decorator with a per-function cache.

    The cache is attached to the decorated function itself
    (``function.cache``), so separately decorated functions never share
    cached results. Unhashable argument tuples are keyed by ``repr()``.
    """
    # Attach the cache to the wrapped function instead of using a global,
    # giving every decorated function its own private cache.
    function.cache = dict()

    def safe_hash(args):
        """Fall back to ``repr()`` for argument tuples that are unhashable."""
        try:
            return hash(args)
        except TypeError:
            return repr(args)

    @functools.wraps(function)
    def _memoize(*args):
        cache_key = safe_hash(args)
        try:
            return function.cache[cache_key]
        except KeyError:
            function.cache[cache_key] = function(*args)
            return function.cache[cache_key]

    return _memoize
@memoize
def printer(*args):
    # Memoized: repeated identical calls only print (and run) once.
    print(args)


def main():
    # Should work as expected
    printer('a', 'b', 'c')
    # Would have issues with the original memoize function because the
    # parameters are unhashable
    printer(dict(a=1, b=2, c=3))


if __name__ == '__main__':
    main()

View File

View File

@ -0,0 +1,62 @@
# Create a version of `functools.cached_property` that can be recalculated
# as needed.
from datetime import datetime
class _NotFound:
    # Sentinel type. NOTE(review): not referenced anywhere in this file —
    # looks like a leftover from an earlier cache-miss check; confirm
    # before removing.
    pass
class CachedProperty:
    # Note that this is a very basic version of `functools.cached_property`. If
    # you wish to use this in production I suggest looking at the original
    # `cached_property` decorator and implement the locking and conflict
    # handling as well.
    #
    # NOTE(review): the cache lives on the owner *class* (`owner._cache`)
    # and is keyed only by attribute name, so all instances of the owner
    # share one cached value. Fine for this single-instance demo; confirm
    # before reusing elsewhere.

    def __init__(self, func):
        # The wrapped getter; invoked at most once until the cache is cleared.
        self.func = func

    def clear(self):
        # Drop the cached value so the next access recomputes it.
        self.cache.pop(self.attrname, None)

    def __set_name__(self, owner, name):
        # Called when the owner class body finishes; wire up the shared
        # class-level cache and remember the attribute name.
        if not hasattr(owner, '_cache'):
            owner._cache = dict()
        # Add a clear method to the owner class
        setattr(owner, f'clear_{name}', self.clear)
        self.cache = owner._cache
        self.attrname = name

    def __get__(self, instance, owner=None):
        # Accessing the descriptor on the class returns the descriptor itself.
        if instance is None:
            return self
        key = self.attrname
        if key not in self.cache:
            self.cache[key] = self.func(instance)
        return self.cache[key]
class SomeClass:
    @CachedProperty
    def current_time(self):
        # Computed once and then served from the cache until it is cleared
        # via the generated `clear_current_time()` method.
        return datetime.now()
def main():
    some_class = SomeClass()
    a = some_class.current_time
    # The second access is served from the cache, so both values are equal.
    b = some_class.current_time
    assert a == b
    # Clear the cache. Even though your editor might complain, this method
    # exists. Can you think of a better API to make the cache clearable?
    some_class.clear_current_time()
    c = some_class.current_time
    # NOTE(review): relies on datetime.now() advancing between the two
    # accesses; could flake on a very coarse clock.
    assert a != c


if __name__ == '__main__':
    main()

View File

View File

@ -0,0 +1,107 @@
# Create a single-dispatch decorator that considers all or a configurable
# number of arguments instead of only the first one.
import functools
import inspect
import typing
def fancysingledispatch(*disabled_args, **disabled_kwargs):
    '''
    A single-dispatch decorator that considers all or a configurable
    number of arguments instead of only the first one.

    Dispatch keys are tuples of parameter types: taken from the type
    hints at registration time and from `type(value)` at call time.
    Arguments named in `disabled_args`, or passed as `name=True` keyword
    arguments, contribute `None` to the key and are therefore excluded
    from dispatch.

    Args:
        disabled_args: A list of argument names to ignore.
        disabled_kwargs: A list of keyword argument names to ignore.
    '''
    # Maps a tuple of parameter types -> registered implementation.
    registry = dict()
    disabled_args = set(disabled_args)
    for key, value in disabled_kwargs.items():
        if value:
            disabled_args.add(key)

    def register(function):
        # Build this implementation's registry key from its type hints,
        # in declaration order.
        key_parts = []
        for key, type_ in typing.get_type_hints(function).items():
            if key == 'return':
                # Ignore the return type
                continue
            if key in disabled_args:
                key_parts.append(None)
            else:
                key_parts.append(type_)
        # NOTE(review): parameters without annotations are absent from
        # `get_type_hints()` but present in the call-time key below; every
        # implementation in this file annotates all parameters.
        registry[tuple(key_parts)] = function
        return function

    def dispatch(function):
        signature = inspect.signature(function)

        @functools.wraps(function)
        def _dispatch(*args, **kwargs):
            # Bind arguments to parameter names so the lookup key is built
            # in the same declaration order that `register` used.
            bound = signature.bind(*args, **kwargs)
            bound.apply_defaults()
            key_parts = []
            for key, value in bound.arguments.items():
                if key in disabled_args:
                    key_parts.append(None)
                else:
                    key_parts.append(type(value))
            key = tuple(key_parts)
            if key in registry:
                return registry[key](*args, **kwargs)
            else:
                raise TypeError(f'No matching function for {key}')

        # Expose `register` so more implementations can be added with
        # `@decorated.register`; the decorated function itself is the
        # first registered implementation.
        _dispatch.register = register
        register(function)
        return _dispatch

    return dispatch
# `last_name=True` excludes that argument from dispatch, so implementations
# are distinguished by the types of `first_name` and `age` only.
@fancysingledispatch(last_name=True)
def hello(first_name: str, last_name: str, age: None = None) -> str:
    return f'Hello {first_name} {last_name}'


# Since this function only differs in the last_name argument, it will
# override the previous one. The original `hello` function will never get
# called again.
@hello.register
def first_name_only(
    first_name: str,
    last_name: None = None,
    age: None = None,
) -> str:
    return f'Hello {first_name}'


@hello.register
def name_age(first_name: str, last_name: str, age: int) -> str:
    # Reuse the function above
    return hello(first_name, last_name) + f', you are {age} years old'


@hello.register
def name_age_days(first_name: str, last_name: str, age: float) -> str:
    # Split a fractional age into whole years plus remaining days.
    days = int((age % 1) * 365)
    age = int(age)
    return hello(
        first_name,
        last_name
    ) + f', you are {age} years and {days} days old'


def main():
    # Each call dispatches on the argument types: no age, int age, float age.
    print(hello('Rick', 'van Hattem'))
    print(hello('Rick', 'van Hattem', age=30))
    print(hello('Rick', 'van Hattem', age=30.5))


if __name__ == '__main__':
    main()

View File

View File

@ -0,0 +1,120 @@
# Enhance the `type_check` decorator to include additional checks such as
# requiring a number to be greater than or less than a given value.
import abc
import functools
import inspect
import pytest
# Note: the exercise erroneously mentions `type_check` instead of
# `enforce_type_hints`. For clarity the function was renamed in the text of
# the book, but it seems I forgot about the exercise.
class Constraint(abc.ABC):
    """Base class for argument constraints used by `enforce_type_hints`.

    Subclasses override `__call__` to raise ValueError on violations and
    `to_string` to describe the constraint.
    """

    def __call__(self, name, value):
        # The base constraint never raises; it simply reports no violation.
        return False

    def to_string(self, name, value, constraint):
        # Shared message format for all constraint violations.
        return f'{name}={value!r} must be {constraint}'

    def __str__(self):
        return self.to_string()


class Gt(Constraint):
    """Require a value to be strictly greater than a lower bound."""

    def __init__(self, value):
        self.value = value

    def __call__(self, name, value):
        if not value > self.value:
            raise ValueError(self.to_string(name, value))

    def to_string(self, name='x', value='x', constraint='x > y'):
        return super().to_string(name, value, f'greater than {self.value}')


class Between(Constraint):
    """Require a value to lie strictly between two bounds (exclusive)."""

    def __init__(self, min_value, max_value):
        self.min_value = min_value
        self.max_value = max_value

    def __call__(self, name, value):
        if not self.min_value < value < self.max_value:
            raise ValueError(self.to_string(name, value))

    def to_string(self, name='x', value='x', constraint='x < y < z'):
        return super().to_string(
            name,
            value,
            f'between {self.min_value} and {self.max_value}',
        )
def enforce_type_hints(**constraint_kwargs):
    """Decorator factory that casts arguments to their annotated types and
    applies additional per-argument constraints.

    Args:
        constraint_kwargs: mapping of argument name to a callable
            ``constraint(name, value)`` that raises ValueError when the
            value is not allowed.

    Raises (from the decorated function):
        ValueError: when a cast fails or a constraint is violated.
    """
    def _enforce_type_hints(function):
        # Construct the signature from the function which contains
        # the type annotations
        signature = inspect.signature(function)

        @functools.wraps(function)
        def __enforce_type_hints(*args, **kwargs):
            # Bind the arguments and apply the default values
            bound = signature.bind(*args, **kwargs)
            bound.apply_defaults()
            for key, value in bound.arguments.items():
                param = signature.parameters[key]
                # The annotation should be a callable type/function so we
                # can cast as validation. Skip parameters without an
                # annotation: `Parameter.empty` is *truthy*, so the
                # original `if param.annotation:` tried to "cast" through
                # the sentinel and crashed with TypeError. Also skip a
                # literal `None` annotation, which is not callable.
                annotation = param.annotation
                if annotation is not inspect.Parameter.empty and \
                        annotation is not None:
                    try:
                        bound.arguments[key] = annotation(value)
                    except ValueError:
                        raise ValueError(
                            f'{key} must be {annotation.__name__}'
                        )
                if key in constraint_kwargs:
                    # Constraints see the raw (pre-cast) value so error
                    # messages show exactly what the caller passed in.
                    constraint_kwargs[key](key, value)
            return function(*bound.args, **bound.kwargs)

        return __enforce_type_hints
    return _enforce_type_hints
@enforce_type_hints(bacon=Gt(0), eggs=Between(1, 4))
def sandwich(bacon: float, eggs: int):
    # Arguments are cast to float/int and validated against the
    # constraints before this body runs.
    print(f'bacon: {bacon!r}, eggs: {eggs!r}')
def test_sandwich():
    # Valid calls: bacon > 0 and 1 < eggs < 4.
    sandwich(1, 2)
    sandwich(5, 3)
    # eggs=0 violates Between(1, 4).
    try:
        sandwich(1, 0)
    except ValueError as e:
        assert str(e) == 'eggs=0 must be between 1 and 4'
    else:
        assert False
    # eggs=5 violates Between(1, 4).
    try:
        sandwich(1, 5)
    except ValueError as e:
        assert str(e) == 'eggs=5 must be between 1 and 4'
    else:
        assert False
    # bacon is checked first, so its violation wins over eggs=5.
    try:
        sandwich(0, 5)
    except ValueError as e:
        assert str(e) == 'bacon=0 must be greater than 0'
    else:
        assert False


if __name__ == '__main__':
    pytest.main(['-vv'])

View File

@ -0,0 +1,31 @@
# Create a generator similar to `itertools.islice()` that allows for a
# negative step so you can execute `some_list[20:10:-1]`.
import itertools
def islice(iterable, start, stop, step):
    """Like `itertools.islice`, but also supporting a negative step so it
    mimics `some_list[start:stop:step]` for forward-only iterables.

    Only non-negative start/stop are supported. With a negative step the
    items between `stop` (exclusive) and `start` (inclusive) are buffered
    in memory before being yielded in reverse.

    Raises:
        ValueError: when step is zero (mirrors list slicing).
    """
    if step > 0:
        assert start <= stop, 'start must be less than stop'
        yield from itertools.islice(iterable, start, stop, step)
    elif step < 0:
        assert start >= stop, 'start must be greater than stop for negative step'
        # Buffer indices stop+1 .. start in ascending order. The original
        # used `i >= stop`, wrongly including index `stop` — list slicing
        # excludes it.
        buffered = []
        for index, item in enumerate(iterable):
            if index > stop:
                buffered.append(item)
            if index >= start:
                break
        # Slicing with the negative step applies both the reversal and the
        # step magnitude (the original only reversed, ignoring e.g. -2).
        yield from buffered[::step]
    else:
        raise ValueError('step must not be zero')
def main():
    some_iterable = iter(range(100))
    # NOTE(review): both calls share the same iterator, so the second
    # slice continues from where the first stopped consuming — its indices
    # are relative to the *remaining* items, not absolute positions.
    print(list(islice(some_iterable, 20, 10, -1)))
    print(list(islice(some_iterable, 10, 20, 1)))


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,21 @@
# Create a class that wraps a generator so it becomes sliceable by using
# `itertools.islice()` internally.
import itertools
class SliceableGenerator:
    """Wrap a generator/iterator so it supports subscription via
    `itertools.islice`.

    Note that every subscription consumes the underlying generator, so
    consecutive lookups continue where the previous one stopped.
    """

    def __init__(self, gen):
        # The wrapped iterator; shared (and consumed) by all lookups.
        self.gen = gen

    def __getitem__(self, index):
        if isinstance(index, slice):
            # Forward the step as well, so `gen[a:b:c]` honours `c` (the
            # original ignored it). islice treats a None step as 1 and
            # does not support negative values.
            return list(itertools.islice(
                self.gen, index.start, index.stop, index.step,
            ))
        # Integer index: skip `index` items and return the next one. The
        # original crashed here with AttributeError (`int` has no .start).
        try:
            return next(itertools.islice(self.gen, index, index + 1))
        except StopIteration:
            raise IndexError(index) from None
def main():
    # itertools.count() is infinite; the slice lazily takes items 10-19.
    generator = SliceableGenerator(itertools.count())
    print(generator[10:20])


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,18 @@
# Write a generator for the Fibonacci numbers.
def fibonacci():
    """Yield the Fibonacci numbers indefinitely, starting from 0."""
    current, following = 0, 1
    while True:
        yield current
        # Shift the window: each value is the sum of the previous two.
        current, following = following, current + following
def main():
    # Print the first ten Fibonacci numbers.
    fib = fibonacci()
    for _ in range(10):
        print(next(fib))


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,23 @@
# Write a generator that uses the sieve of Eratosthenes to generate prime
# numbers.
import itertools
def generate_primes():
    """Yield prime numbers indefinitely.

    Each candidate is checked by trial division against every previously
    found prime (a lazy, list-based variant of the sieve idea).
    """
    found = []
    for candidate in itertools.count(2):
        if any(candidate % prime == 0 for prime in found):
            continue
        found.append(candidate)
        yield candidate
def main():
    # Print the first twenty primes.
    primes = generate_primes()
    for _ in range(20):
        print(next(primes))


if __name__ == '__main__':
    main()

View File

@ -0,0 +1,16 @@
# Create a metaclass to test if attributes/methods are available.
class ExpectedAttrsMeta(type):
    """Metaclass that refuses to create classes missing required attributes."""

    # Attributes every class using this metaclass must define.
    _expected_attrs = ['buy', 'sell']

    def __new__(cls, name, bases, attrs):
        # Only the class's own namespace is checked, not inherited names.
        for attr in cls._expected_attrs:
            if attr not in attrs:
                raise AttributeError(
                    f'{attr} attribute is missing from {name} class'
                )
        return super().__new__(cls, name, bases, attrs)


class Trade(metaclass=ExpectedAttrsMeta):
    """A class that satisfies the metaclass contract."""

    def buy(self):
        pass

    def sell(self):
        pass


# Demonstrate that the check fires. The original defined `Trade` without
# the required attributes, so the module raised AttributeError at import
# time; guarding the failing class keeps the demonstration while keeping
# the module importable.
try:
    class BrokenTrade(metaclass=ExpectedAttrsMeta):
        pass
except AttributeError as error:
    print(error)

View File

@ -0,0 +1,25 @@
# Create a metaclass to test if specific classes are inherited.
class SomeBaseClass:
    """Base class that `ExpectedBasesMeta` requires classes to inherit."""


class ExpectedBasesMeta(type):
    """Metaclass that refuses classes not listing the expected bases."""

    _expected_bases = [SomeBaseClass]

    def __new__(cls, name, bases, attrs):
        # Only *direct* bases are checked, not the full MRO.
        for base in cls._expected_bases:
            if base not in bases:
                raise TypeError(
                    f'{name} is not inheriting {base}'
                )
        return super().__new__(cls, name, bases, attrs)


class Trade(SomeBaseClass, metaclass=ExpectedBasesMeta):
    pass


# Demonstrate that the check fires. The original defined `BrokenTrade`
# unguarded, which raised TypeError at import time and made the module
# unusable; wrapping it keeps the demonstration and the importability.
try:
    class BrokenTrade(metaclass=ExpectedBasesMeta):
        pass
except TypeError as error:
    print(error)

View File

@ -0,0 +1,29 @@
# Build a metaclass that wraps every method with a decorator (could be
# useful for logging/debugging purposes), something with a signature like
# this:
#
# class SomeClass(metaclass=WrappingMeta, wrapper=some_wrapper):
class WrappingMeta(type):
    """Metaclass that decorates every callable attribute with `wrapper`.

    Usage: ``class SomeClass(metaclass=WrappingMeta, wrapper=some_wrapper)``.
    """

    def __new__(cls, name, bases, attrs, wrapper):
        # Build a namespace in which every callable is replaced by its
        # wrapped version; non-callables pass through untouched.
        wrapped_attrs = {
            attr_name: wrapper(value) if callable(value) else value
            for attr_name, value in attrs.items()
        }
        return super().__new__(cls, name, bases, wrapped_attrs)
def print_call(func):
    """Decorator that announces every call of ``func`` before delegating."""
    def _print_call(*args, **kwargs):
        print(f'Calling {func.__name__}({args}, {kwargs})')
        return func(*args, **kwargs)
    return _print_call
class SomeClass(metaclass=WrappingMeta, wrapper=print_call):
    # Every callable attribute (here just `some_method`) is wrapped by
    # `print_call` when the class is created.
    def some_method(self):
        print('some_method() called')


if __name__ == '__main__':
    SomeClass().some_method()