ch21 examples

Luciano Ramalho 2021-02-12 22:53:10 -03:00
parent 93bb4407fa
commit ace44eeaf2
16 changed files with 854 additions and 0 deletions


@ -0,0 +1,31 @@
"""
Experiment with ``ThreadPoolExecutor.map``
"""
# tag::EXECUTOR_MAP[]
from time import sleep, strftime
from concurrent import futures


def display(*args):  # <1>
    print(strftime('[%H:%M:%S]'), end=' ')
    print(*args)


def loiter(n):  # <2>
    msg = '{}loiter({}): doing nothing for {}s...'
    display(msg.format('\t'*n, n, n))
    sleep(n)
    msg = '{}loiter({}): done.'
    display(msg.format('\t'*n, n))
    return n * 10  # <3>


def main():
    display('Script starting.')
    executor = futures.ThreadPoolExecutor(max_workers=3)  # <4>
    results = executor.map(loiter, range(5))  # <5>
    display('results:', results)  # <6>
    display('Waiting for individual results:')
    for i, result in enumerate(results):  # <7>
        display('result {}: {}'.format(i, result))


if __name__ == '__main__':
    main()
# end::EXECUTOR_MAP[]
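
A note on what the demo shows: ``Executor.map`` returns a lazy generator, and iterating it yields results in the order the calls were submitted, not the order they finish, so a slow early call blocks the results of faster later ones. A minimal sketch of that behavior (an illustration, not part of this commit):

from concurrent import futures
from time import sleep

def double_after(n):
    sleep(n)  # simulate work that takes n seconds
    return n * 2

with futures.ThreadPoolExecutor(max_workers=3) as pool:
    results = pool.map(double_after, [2, 0, 1])
    print(list(results))  # [4, 0, 2] -- submission order, even though the 0s task finishes first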


@ -0,0 +1,8 @@
AD AE AF AG AL AM AO AR AT AU AZ BA BB BD BE BF BG BH BI BJ BN BO BR BS BT
BW BY BZ CA CD CF CG CH CI CL CM CN CO CR CU CV CY CZ DE DJ DK DM DZ EC EE
EG ER ES ET FI FJ FM FR GA GB GD GE GH GM GN GQ GR GT GW GY HN HR HT HU ID
IE IL IN IQ IR IS IT JM JO JP KE KG KH KI KM KN KP KR KW KZ LA LB LC LI LK
LR LS LT LU LV LY MA MC MD ME MG MH MK ML MM MN MR MT MU MV MW MX MY MZ NA
NE NG NI NL NO NP NR NZ OM PA PE PG PH PK PL PT PW PY QA RO RS RU RW SA SB
SC SD SE SG SI SK SL SM SN SO SR SS ST SV SY SZ TD TG TH TJ TL TM TN TO TR
TT TV TW TZ UA UG US UY UZ VA VC VE VN VU WS YE ZA ZM ZW


@ -0,0 +1 @@
*.gif

21-futures/getflags/flags.py Executable file

@ -0,0 +1,59 @@
#!/usr/bin/env python3
"""Download flags of top 20 countries by population
Sequential version
Sample runs (first with new domain, so no caching ever)::
$ ./flags.py
BD BR CD CN DE EG ET FR ID IN IR JP MX NG PH PK RU TR US VN
20 downloads in 26.21s
$ ./flags.py
BD BR CD CN DE EG ET FR ID IN IR JP MX NG PH PK RU TR US VN
20 downloads in 14.57s
"""
# tag::FLAGS_PY[]
import os
import time
from typing import Callable
import requests # <1>
POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
'MX PH VN ET EG DE IR TR CD FR').split() # <2>
BASE_URL = 'http://fluentpython.com/data/flags' # <3>
DEST_DIR = 'downloaded/' # <4>
def save_flag(img: bytes, filename: str) -> None: # <5>
path = os.path.join(DEST_DIR, filename)
with open(path, 'wb') as fp:
fp.write(img)
def get_flag(cc: str) -> bytes: # <6>
cc = cc.lower()
url = f'{BASE_URL}/{cc}/{cc}.gif'
resp = requests.get(url)
return resp.content
def download_many(cc_list: list[str]) -> int: # <7>
for cc in sorted(cc_list): # <8>
image = get_flag(cc)
print(cc, end=' ', flush=True) # <9>
save_flag(image, cc.lower() + '.gif')
return len(cc_list)
def main(downloader: Callable[[list[str]], int]) -> None: # <10>
t0 = time.perf_counter() # <11>
count = downloader(POP20_CC)
elapsed = time.perf_counter() - t0
print(f'\n{count} downloads in {elapsed:.2f}s')
if __name__ == '__main__':
main(download_many) # <12>
# end::FLAGS_PY[]
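
Because ``main`` accepts any downloader matching ``Callable[[list[str]], int]``, the concurrent variants below (flags_threadpool.py, flags_asyncio.py) reuse it unchanged. A minimal sketch (not from this commit) plugging in a dummy downloader:

from flags import main

def fake_download_many(cc_list: list[str]) -> int:
    # pretend every flag was fetched instantly
    for cc in sorted(cc_list):
        print(cc, end=' ', flush=True)
    return len(cc_list)

if __name__ == '__main__':
    main(fake_download_many)  # prints the 20 codes and the elapsed time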

Binary file not shown.


@ -0,0 +1,113 @@
#!/usr/bin/env python3
"""Download flags of countries (with error handling).
asyncio async/await version
"""
# BEGIN FLAGS2_ASYNCIO_TOP
import asyncio
from collections import Counter
import aiohttp
from aiohttp import web
from aiohttp.http_exceptions import HttpProcessingError
import tqdm # type: ignore
from flags2_common import main, HTTPStatus, Result, save_flag
# default set low to avoid errors from remote site, such as
# 503 - Service Temporarily Unavailable
DEFAULT_CONCUR_REQ = 5
MAX_CONCUR_REQ = 1000
class FetchError(Exception): # <1>
def __init__(self, country_code):
self.country_code = country_code
async def get_flag(session, base_url, cc): # <2>
url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
async with session.get(url) as resp:
if resp.status == 200:
return await resp.read()
elif resp.status == 404:
raise web.HTTPNotFound()
else:
raise HttpProcessingError(
code=resp.status, message=resp.reason,
headers=resp.headers)
async def download_one(session, cc, base_url, semaphore, verbose): # <3>
try:
async with semaphore: # <4>
image = await get_flag(session, base_url, cc) # <5>
except web.HTTPNotFound: # <6>
status = HTTPStatus.not_found
msg = 'not found'
except Exception as exc:
raise FetchError(cc) from exc # <7>
else:
save_flag(image, cc.lower() + '.gif') # <8>
status = HTTPStatus.ok
msg = 'OK'
if verbose and msg:
print(cc, msg)
return Result(status, cc)
# END FLAGS2_ASYNCIO_TOP
# BEGIN FLAGS2_ASYNCIO_DOWNLOAD_MANY
async def downloader_coro(cc_list: list[str],
base_url: str,
verbose: bool,
concur_req: int) -> Counter[HTTPStatus]: # <1>
counter: Counter[HTTPStatus] = Counter()
semaphore = asyncio.Semaphore(concur_req) # <2>
async with aiohttp.ClientSession() as session: # <8>
to_do = [download_one(session, cc, base_url, semaphore, verbose)
for cc in sorted(cc_list)] # <3>
to_do_iter = asyncio.as_completed(to_do) # <4>
if not verbose:
to_do_iter = tqdm.tqdm(to_do_iter, total=len(cc_list)) # <5>
for future in to_do_iter: # <6>
try:
res = await future # <7>
except FetchError as exc: # <8>
country_code = exc.country_code # <9>
try:
if exc.__cause__ is None:
error_msg = 'Unknown cause'
else:
error_msg = exc.__cause__.args[0] # <10>
except IndexError:
error_msg = exc.__cause__.__class__.__name__ # <11>
if verbose and error_msg:
msg = '*** Error for {}: {}'
print(msg.format(country_code, error_msg))
status = HTTPStatus.error
else:
status = res.status
counter[status] += 1 # <12>
return counter # <13>
def download_many(cc_list: list[str],
base_url: str,
verbose: bool,
concur_req: int) -> Counter[HTTPStatus]:
coro = downloader_coro(cc_list, base_url, verbose, concur_req)
counts = asyncio.run(coro) # <14>
return counts
if __name__ == '__main__':
main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
# END FLAGS2_ASYNCIO_DOWNLOAD_MANY
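
The throttling device here is ``asyncio.Semaphore``: ``async with semaphore`` lets at most ``concur_req`` coroutines hold the semaphore at the same time. A minimal sketch of the pattern in isolation (not from this commit):

import asyncio

async def worker(n: int, sem: asyncio.Semaphore) -> int:
    async with sem:  # at most 3 workers inside this block at a time
        await asyncio.sleep(.1)
        return n

async def demo() -> None:
    sem = asyncio.Semaphore(3)
    results = await asyncio.gather(*(worker(i, sem) for i in range(10)))
    print(results)  # [0, 1, ..., 9]; gather preserves argument order

if __name__ == '__main__':
    asyncio.run(demo())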


@ -0,0 +1,155 @@
"""Utilities for second set of flag examples.
"""
import os
import time
import sys
import string
import argparse
from collections import namedtuple, Counter
from enum import Enum
Result = namedtuple('Result', 'status data')
HTTPStatus = Enum('HTTPStatus', 'ok not_found error')
POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
'MX PH VN ET EG DE IR TR CD FR').split()
DEFAULT_CONCUR_REQ = 1
MAX_CONCUR_REQ = 1
SERVERS = {
'REMOTE': 'http://fluentpython.com/data/flags',
'LOCAL': 'http://localhost:8000/flags',
'DELAY': 'http://localhost:8001/flags',
'ERROR': 'http://localhost:8002/flags',
}
DEFAULT_SERVER = 'LOCAL'
DEST_DIR = 'downloaded/'
COUNTRY_CODES_FILE = 'country_codes.txt'
def save_flag(img: bytes, filename: str) -> None:
path = os.path.join(DEST_DIR, filename)
with open(path, 'wb') as fp:
fp.write(img)
def initial_report(cc_list: list[str],
actual_req: int,
server_label: str) -> None:
if len(cc_list) <= 10:
cc_msg = ', '.join(cc_list)
else:
cc_msg = 'from {} to {}'.format(cc_list[0], cc_list[-1])
print('{} site: {}'.format(server_label, SERVERS[server_label]))
msg = 'Searching for {} flag{}: {}'
plural = 's' if len(cc_list) != 1 else ''
print(msg.format(len(cc_list), plural, cc_msg))
plural = 's' if actual_req != 1 else ''
msg = '{} concurrent connection{} will be used.'
print(msg.format(actual_req, plural))
def final_report(cc_list: list[str],
counter: Counter[HTTPStatus],
start_time: float) -> None:
elapsed = time.time() - start_time
print('-' * 20)
msg = '{} flag{} downloaded.'
plural = 's' if counter[HTTPStatus.ok] != 1 else ''
print(msg.format(counter[HTTPStatus.ok], plural))
if counter[HTTPStatus.not_found]:
print(counter[HTTPStatus.not_found], 'not found.')
if counter[HTTPStatus.error]:
plural = 's' if counter[HTTPStatus.error] != 1 else ''
print('{} error{}.'.format(counter[HTTPStatus.error], plural))
print('Elapsed time: {:.2f}s'.format(elapsed))
def expand_cc_args(every_cc: bool,
all_cc: bool,
cc_args: list[str],
limit: int) -> list[str]:
codes: set[str] = set()
A_Z = string.ascii_uppercase
if every_cc:
codes.update(a+b for a in A_Z for b in A_Z)
elif all_cc:
with open(COUNTRY_CODES_FILE) as fp:
text = fp.read()
codes.update(text.split())
else:
for cc in (c.upper() for c in cc_args):
if len(cc) == 1 and cc in A_Z:
codes.update(cc+c for c in A_Z)
elif len(cc) == 2 and all(c in A_Z for c in cc):
codes.add(cc)
else:
msg = 'each CC argument must be A to Z or AA to ZZ.'
raise ValueError('*** Usage error: '+msg)
return sorted(codes)[:limit]
def process_args(default_concur_req):
server_options = ', '.join(sorted(SERVERS))
parser = argparse.ArgumentParser(
description='Download flags for country codes. '
'Default: top 20 countries by population.')
parser.add_argument('cc', metavar='CC', nargs='*',
help='country code or 1st letter (eg. B for BA...BZ)')
parser.add_argument('-a', '--all', action='store_true',
help='get all available flags (AD to ZW)')
parser.add_argument('-e', '--every', action='store_true',
help='get flags for every possible code (AA...ZZ)')
parser.add_argument('-l', '--limit', metavar='N', type=int,
help='limit to N first codes', default=sys.maxsize)
parser.add_argument('-m', '--max_req', metavar='CONCURRENT', type=int,
default=default_concur_req,
help=f'maximum concurrent requests (default={default_concur_req})')
parser.add_argument('-s', '--server', metavar='LABEL',
default=DEFAULT_SERVER,
help=('Server to hit; one of ' +
f'{server_options} (default={DEFAULT_SERVER})'))
parser.add_argument('-v', '--verbose', action='store_true',
help='output detailed progress info')
args = parser.parse_args()
if args.max_req < 1:
print('*** Usage error: --max_req CONCURRENT must be >= 1')
parser.print_usage()
sys.exit(1)
if args.limit < 1:
print('*** Usage error: --limit N must be >= 1')
parser.print_usage()
sys.exit(1)
args.server = args.server.upper()
if args.server not in SERVERS:
print('*** Usage error: --server LABEL must be one of',
server_options)
parser.print_usage()
sys.exit(1)
try:
cc_list = expand_cc_args(args.every, args.all, args.cc, args.limit)
except ValueError as exc:
print(exc.args[0])
parser.print_usage()
sys.exit(1)
if not cc_list:
cc_list = sorted(POP20_CC)
return args, cc_list
def main(download_many, default_concur_req, max_concur_req):
args, cc_list = process_args(default_concur_req)
actual_req = min(args.max_req, max_concur_req, len(cc_list))
initial_report(cc_list, actual_req, args.server)
base_url = SERVERS[args.server]
t0 = time.time()
counter = download_many(cc_list, base_url, args.verbose, actual_req)
assert sum(counter.values()) == len(cc_list), \
'some downloads are unaccounted for'
final_report(cc_list, counter, t0)
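
A quick illustration (not from this commit) of how ``expand_cc_args`` expands the positional CC arguments:

from flags2_common import expand_cc_args

# a single letter expands to every two-letter code starting with it
print(expand_cc_args(False, False, ['b'], 5))          # ['BA', 'BB', 'BC', 'BD', 'BE']
# explicit two-letter codes are upper-cased, deduplicated, and sorted
print(expand_cc_args(False, False, ['cn', 'br'], 10))  # ['BR', 'CN']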


@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""Download flags of countries (with error handling).
Sequential version
Sample run::
$ python3 flags2_sequential.py -s DELAY b
DELAY site: http://localhost:8002/flags
Searching for 26 flags: from BA to BZ
1 concurrent connection will be used.
--------------------
17 flags downloaded.
9 not found.
Elapsed time: 13.36s
"""
from collections import Counter
import requests
import tqdm # type: ignore
from flags2_common import main, save_flag, HTTPStatus, Result
DEFAULT_CONCUR_REQ = 1
MAX_CONCUR_REQ = 1
# tag::FLAGS2_BASIC_HTTP_FUNCTIONS[]
def get_flag(base_url: str, cc: str) -> bytes:
cc = cc.lower()
url = f'{base_url}/{cc}/{cc}.gif'
resp = requests.get(url)
if resp.status_code != 200: # <1>
resp.raise_for_status()
return resp.content
def download_one(cc: str, base_url: str, verbose: bool = False):
try:
image = get_flag(base_url, cc)
except requests.exceptions.HTTPError as exc: # <2>
res = exc.response
if res.status_code == 404:
status = HTTPStatus.not_found # <3>
msg = 'not found'
else: # <4>
raise
else:
save_flag(image, cc.lower() + '.gif')
status = HTTPStatus.ok
msg = 'OK'
if verbose: # <5>
print(cc, msg)
return Result(status, cc) # <6>
# end::FLAGS2_BASIC_HTTP_FUNCTIONS[]
# tag::FLAGS2_DOWNLOAD_MANY_SEQUENTIAL[]
def download_many(cc_list: list[str],
base_url: str,
verbose: bool,
_unused_concur_req: int) -> Counter[int]:
counter: Counter[int] = Counter() # <1>
cc_iter = sorted(cc_list) # <2>
if not verbose:
cc_iter = tqdm.tqdm(cc_iter) # <3>
for cc in cc_iter: # <4>
try:
res = download_one(cc, base_url, verbose) # <5>
except requests.exceptions.HTTPError as exc: # <6>
error_msg = 'HTTP error {res.status_code} - {res.reason}'
error_msg = error_msg.format(res=exc.response)
except requests.exceptions.ConnectionError: # <7>
error_msg = 'Connection error'
else: # <8>
error_msg = ''
status = res.status
if error_msg:
status = HTTPStatus.error # <9>
counter[status] += 1 # <10>
if verbose and error_msg: # <11>
print(f'*** Error for {cc}: {error_msg}')
return counter # <12>
# end::FLAGS2_DOWNLOAD_MANY_SEQUENTIAL[]
if __name__ == '__main__':
main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
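
The error handling above relies on ``Response.raise_for_status`` converting 4xx/5xx responses into ``requests.exceptions.HTTPError``. A minimal sketch of that conversion (not from this commit; 'xx' is a deliberately invalid country code, and the server's exact response is an assumption):

import requests

resp = requests.get('http://fluentpython.com/data/flags/xx/xx.gif')
print(resp.status_code)      # e.g. 404
try:
    resp.raise_for_status()  # raises HTTPError for 4xx/5xx status codes
except requests.exceptions.HTTPError as exc:
    print(exc.response.status_code)  # same response object, carried by the exception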


@ -0,0 +1,73 @@
#!/usr/bin/env python3
"""Download flags of countries (with error handling).
ThreadPool version
Sample run::
$ python3 flags2_threadpool.py -s ERROR -e
ERROR site: http://localhost:8003/flags
Searching for 676 flags: from AA to ZZ
30 concurrent connections will be used.
--------------------
150 flags downloaded.
361 not found.
165 errors.
Elapsed time: 7.46s
"""
# tag::FLAGS2_THREADPOOL[]
from collections import Counter
from concurrent import futures
import requests
import tqdm # type: ignore # <1>
from flags2_common import main, HTTPStatus # <2>
from flags2_sequential import download_one # <3>
DEFAULT_CONCUR_REQ = 30 # <4>
MAX_CONCUR_REQ = 1000 # <5>
def download_many(cc_list: list[str],
base_url: str,
verbose: bool,
concur_req: int) -> Counter[int]:
counter: Counter[int] = Counter()
with futures.ThreadPoolExecutor(max_workers=concur_req) as executor: # <6>
to_do_map = {} # <7>
for cc in sorted(cc_list): # <8>
future = executor.submit(download_one,
cc, base_url, verbose) # <9>
to_do_map[future] = cc # <10>
done_iter = futures.as_completed(to_do_map) # <11>
if not verbose:
done_iter = tqdm.tqdm(done_iter, total=len(cc_list)) # <12>
for future in done_iter: # <13>
try:
res = future.result() # <14>
except requests.exceptions.HTTPError as exc: # <15>
error_msg = 'HTTP {res.status_code} - {res.reason}'
error_msg = error_msg.format(res=exc.response)
except requests.exceptions.ConnectionError:
error_msg = 'Connection error'
else:
error_msg = ''
status = res.status
if error_msg:
status = HTTPStatus.error
counter[status] += 1
if verbose and error_msg:
cc = to_do_map[future] # <16>
print('*** Error for {}: {}'.format(cc, error_msg))
return counter
if __name__ == '__main__':
main(download_many, DEFAULT_CONCUR_REQ, MAX_CONCUR_REQ)
# end::FLAGS2_THREADPOOL[]


@ -0,0 +1,47 @@
#!/usr/bin/env python3
"""Download flags of top 20 countries by population
asyncio + aiottp version
Sample run::
$ python3 flags_asyncio.py
EG VN IN TR RU ID US DE CN MX JP BD NG ET FR BR PH PK CD IR
20 flags downloaded in 1.07s
"""
# tag::FLAGS_ASYNCIO[]
import asyncio
from aiohttp import ClientSession # <1>
from flags import BASE_URL, save_flag, main # <2>
async def get_flag(session: ClientSession ,cc: str) -> bytes: # <3>
cc = cc.lower()
url = f'{BASE_URL}/{cc}/{cc}.gif'
async with session.get(url) as resp: # <4>
print(resp)
return await resp.read() # <5>
async def download_one(session: ClientSession, cc: str): # <6>
image = await get_flag(session, cc)
print(cc, end=' ', flush=True)
save_flag(image, cc.lower() + '.gif')
return cc
async def supervisor(cc_list):
async with ClientSession() as session: # <7>
to_do = [download_one(session, cc)
for cc in sorted(cc_list)] # <8>
res = await asyncio.gather(*to_do) # <9>
return len(res)
def download_many(cc_list):
return asyncio.run(supervisor(cc_list)) # <10>
if __name__ == '__main__':
main(download_many)
# end::FLAGS_ASYNCIO[]


@ -0,0 +1,34 @@
#!/usr/bin/env python3
"""Download flags of top 20 countries by population
ThreadPoolExecutor version
Sample run::
$ python3 flags_threadpool.py
DE FR BD CN EG RU IN TR VN ID JP BR NG MX PK ET PH CD US IR
20 downloads in 0.35s
"""
# tag::FLAGS_THREADPOOL[]
from concurrent import futures
from flags import save_flag, get_flag, main # <1>
def download_one(cc: str): # <2>
image = get_flag(cc)
print(cc, end=' ', flush=True)
save_flag(image, cc.lower() + '.gif')
return cc
def download_many(cc_list: list[str]) -> int:
with futures.ThreadPoolExecutor() as executor: # <3>
res = executor.map(download_one, sorted(cc_list)) # <4>
return len(list(res)) # <5>
if __name__ == '__main__':
main(download_many) # <6>
# end::FLAGS_THREADPOOL[]


@ -0,0 +1,36 @@
#!/usr/bin/env python3
"""Download flags of top 20 countries by population
ThreadPoolExecutor example with ``as_completed``.
"""
from concurrent import futures
from flags import save_flag, main
from flags_threadpool import download_one
# tag::FLAGS_THREADPOOL_AS_COMPLETED[]
def download_many(cc_list: list[str]) -> int:
cc_list = cc_list[:5] # <1>
with futures.ThreadPoolExecutor(max_workers=3) as executor: # <2>
to_do: list[futures.Future] = []
for cc in sorted(cc_list): # <3>
future = executor.submit(download_one, cc) # <4>
to_do.append(future) # <5>
msg = 'Scheduled for {}: {}'
print(msg.format(cc, future)) # <6>
count = 0
for future in futures.as_completed(to_do): # <7>
res: str = future.result() # <8>
msg = '{} result: {!r}'
print(msg.format(future, res)) # <9>
count += 1
return count
# end::FLAGS_THREADPOOL_AS_COMPLETED[]
if __name__ == '__main__':
main(download_many)
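
In contrast to ``executor.map``, ``futures.as_completed`` yields futures in the order they finish. A minimal sketch of the difference (not from this commit):

from concurrent import futures
from time import sleep

def slow_echo(n):
    sleep(n)  # simulate work that takes n seconds
    return n

with futures.ThreadPoolExecutor(max_workers=3) as pool:
    to_do = [pool.submit(slow_echo, n) for n in (2, 0, 1)]
    done_order = [f.result() for f in futures.as_completed(to_do)]
    print(done_order)  # most likely [0, 1, 2] -- completion order, not submission order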


@ -0,0 +1,11 @@
aiohttp==3.7.3
async-timeout==3.0.1
attrs==20.3.0
certifi==2020.12.5
chardet==4.0.0
idna==2.10
requests==2.25.1
urllib3==1.26.3
tqdm==4.56.2
multidict==5.1.0
yarl==1.6.3
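
These pinned versions can be installed into a virtual environment with `pip install -r requirements.txt` (assuming the file above is saved under that name).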


@ -0,0 +1,92 @@
#!/usr/bin/env python3
"""Slow HTTP server class.
This module implements a ThreadingHTTPServer using a custom
SimpleHTTPRequestHandler subclass that introduces delays to all
GET responses, and optionally returns errors to a fraction of
the requests if given the --error_rate command-line argument.
"""
import time
import os
import socket
import contextlib
from functools import partial
from random import random
from http import server, HTTPStatus
from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler
class SlowHTTPRequestHandler(SimpleHTTPRequestHandler):
"""SlowHTTPRequestHandler adds delays and errors to test HTTP clients.
The optional error_rate argument determines how often GET requests
receive a 418 status code, "I'm a teapot".
If error_rate is .15, there's a 15% probability of each GET request
getting that error.
When the server believes it is a teapot, it refuses requests to serve files.
See: https://tools.ietf.org/html/rfc2324#section-2.3.2
"""
def __init__(self, *args, error_rate=0.0, **kwargs):
self.error_rate = error_rate
super().__init__(*args, **kwargs)
def do_GET(self):
"""Serve a GET request."""
time.sleep(.5)
if random() < self.error_rate:
self.send_error(HTTPStatus.IM_A_TEAPOT, "I'm a Teapot")
else:
f = self.send_head()
if f:
try:
self.copyfile(f, self.wfile)
finally:
f.close()
# The code in the `if` block below, including comments, was copied
# and adapted from the `http.server` module of Python 3.9
# https://github.com/python/cpython/blob/master/Lib/http/server.py
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--bind', '-b', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: all interfaces]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
help='Specify alternative directory '
'[default:current directory]')
parser.add_argument('--error-rate', '-e', metavar='PROBABILITY',
default=0.0, type=float,
help='Error rate; e.g. use .25 for 25%% probability '
'[default:0.0]')
parser.add_argument('port', action='store',
default=8000, type=int,
nargs='?',
help='Specify alternate port [default: 8000]')
args = parser.parse_args()
handler_class = partial(SlowHTTPRequestHandler,
directory=args.directory,
error_rate=args.error_rate)
# ensure dual-stack is not disabled; ref #38907
class DualStackServer(ThreadingHTTPServer):
def server_bind(self):
# suppress exception when protocol is IPv4
with contextlib.suppress(Exception):
self.socket.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
return super().server_bind()
server.test(
HandlerClass=handler_class,
ServerClass=DualStackServer,
port=args.port,
bind=args.bind,
)
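
To exercise the DELAY and ERROR entries in flags2_common.SERVERS, this server can presumably be started once per port, e.g. `python3 slow_server.py 8001` and `python3 slow_server.py 8002 --error-rate .25`, run from (or pointed via --directory at) a directory that serves the flag images under a flags/ path; the ports follow the SERVERS mapping above, and the exact directory layout is an assumption.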

21-futures/primes/primes.py Executable file

@ -0,0 +1,52 @@
#!/usr/bin/env python3
import math

PRIME_FIXTURE = [
    (2, True),
    (142702110479723, True),
    (299593572317531, True),
    (3333333333333301, True),
    (3333333333333333, False),
    (3333335652092209, False),
    (4444444444444423, True),
    (4444444444444444, False),
    (4444444488888889, False),
    (5555553133149889, False),
    (5555555555555503, True),
    (5555555555555555, False),
    (6666666666666666, False),
    (6666666666666719, True),
    (6666667141414921, False),
    (7777777536340681, False),
    (7777777777777753, True),
    (7777777777777777, False),
    (9999999999999917, True),
    (9999999999999999, False),
]

NUMBERS = [n for n, _ in PRIME_FIXTURE]

# tag::IS_PRIME[]
def is_prime(n: int) -> bool:
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False

    root = math.isqrt(n)
    for i in range(3, root + 1, 2):
        if n % i == 0:
            return False
    return True
# end::IS_PRIME[]

if __name__ == '__main__':
    for n, prime in PRIME_FIXTURE:
        prime_res = is_prime(n)
        assert prime_res == prime
        print(n, prime)
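
A quick interactive check (not from this commit), using the fixture above as ground truth:

from primes import is_prime, NUMBERS

assert is_prime(2) and not is_prime(4444444444444444)
largest_prime = max(n for n in NUMBERS if is_prime(n))
print(largest_prime)  # 9999999999999917, per PRIME_FIXTURE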

21-futures/primes/proc_pool.py Executable file

@ -0,0 +1,50 @@
#!/usr/bin/env python3
"""
proc_pool.py: a version of the proc.py example from chapter 20,
but using `concurrent.futures.ProcessPoolExecutor`.
"""
# tag::PRIMES_POOL[]
from time import perf_counter
from typing import NamedTuple
from concurrent import futures # <1>
import sys
from primes import is_prime, NUMBERS
class PrimeResult(NamedTuple): # <2>
n: int
flag: bool
elapsed: float
def check(n: int) -> PrimeResult:
t0 = perf_counter()
res = is_prime(n)
return PrimeResult(n, res, perf_counter() - t0)
def main() -> None:
if len(sys.argv) < 2:
workers = None # <3>
else:
workers = int(sys.argv[1])
executor = futures.ProcessPoolExecutor(workers) # <4>
actual_workers = executor._max_workers # type: ignore # <5>
print(f'Checking {len(NUMBERS)} numbers with {actual_workers} processes:')
t0 = perf_counter()
numbers = sorted(NUMBERS, reverse=True) # <6>
with executor: # <7>
for n, prime, elapsed in executor.map(check, numbers): # <8>
label = 'P' if prime else ' '
print(f'{n:16} {label} {elapsed:9.6f}s')
time = perf_counter() - t0
print('Total time:', f'{time:0.2f}s')
if __name__ == '__main__':
main()
# end::PRIMES_POOL[]
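
For comparison, a minimal single-process baseline (a sketch, not from this commit; the proc.py example mentioned in the docstring plays this role in the book) that checks the same numbers sequentially:

from time import perf_counter

from primes import is_prime, NUMBERS

def main_sequential() -> None:
    t0 = perf_counter()
    for n in sorted(NUMBERS, reverse=True):
        label = 'P' if is_prime(n) else ' '
        print(f'{n:16} {label}')
    print('Total time:', f'{perf_counter() - t0:0.2f}s')

if __name__ == '__main__':
    main_sequential()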