ch21 cleanup, f-strings, and pathlib

Miroslav Šedivý 2021-02-13 19:11:54 +01:00
parent c5114a5679
commit 93dfeaeb80
11 changed files with 70 additions and 81 deletions

View File

@ -24,7 +24,7 @@ def main():
display('results:', results) # <6>
display('Waiting for individual results:')
for i, result in enumerate(results): # <7>
display('result {}: {}'.format(i, result))
display(f'result {i}: {result}')
if __name__ == '__main__':
main()
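
The change above is the pattern applied throughout this commit: str.format() calls rewritten as f-strings. A minimal, self-contained sketch of the equivalence, with made-up values, covering the !r conversion and the format specs that show up in later hunks:

i, result = 3, 'BR'                        # sample values, not from the diff
elapsed = 1.2345

old = 'result {}: {}'.format(i, result)    # positional placeholders
new = f'result {i}: {result}'              # same output, names inline
assert old == new

# f-strings also accept the !r conversion and format specs used below
assert f'{result!r}' == "'BR'"             # repr() of the value
assert f'{elapsed:.2f}s' == '1.23s'        # two decimal places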

View File

@ -17,8 +17,8 @@ Sample runs (first with new domain, so no caching ever)::
"""
# tag::FLAGS_PY[]
import os
import time
from pathlib import Path
from typing import Callable
import requests # <1>
@ -27,12 +27,10 @@ POP20_CC = ('CN IN US ID BR PK NG BD RU JP '
'MX PH VN ET EG DE IR TR CD FR').split() # <2>
BASE_URL = 'http://fluentpython.com/data/flags' # <3>
DEST_DIR = 'downloaded/' # <4>
DEST_DIR = Path('downloaded') # <4>
def save_flag(img: bytes, filename: str) -> None: # <5>
path = os.path.join(DEST_DIR, filename)
with open(path, 'wb') as fp:
fp.write(img)
(DEST_DIR / filename).write_bytes(img)
def get_flag(cc: str) -> bytes: # <6>
cc = cc.lower()
@ -45,7 +43,6 @@ def download_many(cc_list: list[str]) -> int: # <7>
image = get_flag(cc)
print(cc, end=' ', flush=True) # <9>
save_flag(image, cc.lower() + '.gif')
return len(cc_list)
def main(downloader: Callable[[list[str]], int]) -> None: # <10>
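
The save_flag() rewrite above swaps os.path.join() plus open('wb') for a Path constant and Path.write_bytes(). A minimal sketch of the two forms side by side; the directory and file names are examples, and neither form creates the directory, so a mkdir(exist_ok=True) still has to happen elsewhere:

import os
from pathlib import Path

DEST_DIR = Path('downloaded')        # example directory name
DEST_DIR.mkdir(exist_ok=True)        # neither write below creates it
img = b'GIF89a'                      # placeholder bytes

# before: build a string path, open a file object, write, close
path = os.path.join(str(DEST_DIR), 'br.gif')
with open(path, 'wb') as fp:
    fp.write(img)

# after: / joins Path segments; write_bytes() opens, writes and closes
(DEST_DIR / 'br.gif').write_bytes(img)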

View File

@ -28,7 +28,8 @@ class FetchError(Exception): # <1>
async def get_flag(session, base_url, cc): # <2>
url = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())
cc = cc.lower()
url = f'{base_url}/{cc}/{cc}.gif'
async with session.get(url) as resp:
if resp.status == 200:
return await resp.read()
@ -87,8 +88,7 @@ async def downloader_coro(cc_list: list[str],
except IndexError:
error_msg = exc.__cause__.__class__.__name__ # <11>
if verbose and error_msg:
msg = '*** Error for {}: {}'
print(msg.format(country_code, error_msg))
print(f'*** Error for {country_code}: {error_msg}')
status = HTTPStatus.error
else:
status = res.status
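
One detail in the get_flag() hunk: str.format() could lowercase the code once and reuse the named {cc} field twice, while an f-string interpolates expressions in place, so the rewrite binds cc = cc.lower() first instead of repeating the call. A small sketch with sample values (the URL is made up):

base_url, cc = 'http://example.com/flags', 'BR'   # sample values

# before: one format() call, the lowered code bound to a named field
old = '{}/{cc}/{cc}.gif'.format(base_url, cc=cc.lower())

# after: lower once, then interpolate the plain name twice
cc = cc.lower()
new = f'{base_url}/{cc}/{cc}.gif'

assert old == new == 'http://example.com/flags/br/br.gif'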

View File

@ -1,14 +1,13 @@
"""Utilities for second set of flag examples.
"""
import os
import time
import sys
import string
import argparse
import string
import sys
import time
from collections import namedtuple, Counter
from enum import Enum
from pathlib import Path
Result = namedtuple('Result', 'status data')
@ -28,14 +27,12 @@ SERVERS = {
}
DEFAULT_SERVER = 'LOCAL'
DEST_DIR = 'downloaded/'
COUNTRY_CODES_FILE = 'country_codes.txt'
DEST_DIR = Path('downloaded')
COUNTRY_CODES_FILE = Path('country_codes.txt')
def save_flag(img: bytes, filename: str) -> None:
path = os.path.join(DEST_DIR, filename)
with open(path, 'wb') as fp:
fp.write(img)
(DEST_DIR / filename).write_bytes(img)
def initial_report(cc_list: list[str],
@ -44,30 +41,27 @@ def initial_report(cc_list: list[str],
if len(cc_list) <= 10:
cc_msg = ', '.join(cc_list)
else:
cc_msg = 'from {} to {}'.format(cc_list[0], cc_list[-1])
print('{} site: {}'.format(server_label, SERVERS[server_label]))
msg = 'Searching for {} flag{}: {}'
cc_msg = f'from {cc_list[0]} to {cc_list[-1]}'
print(f'{server_label} site: {SERVERS[server_label]}')
plural = 's' if len(cc_list) != 1 else ''
print(msg.format(len(cc_list), plural, cc_msg))
print(f'Searching for {len(cc_list)} flag{plural}: {cc_msg}')
plural = 's' if actual_req != 1 else ''
msg = '{} concurrent connection{} will be used.'
print(msg.format(actual_req, plural))
print(f'{actual_req} concurrent connection{plural} will be used.')
def final_report(cc_list: list[str],
counter: Counter[HTTPStatus],
start_time: float) -> None:
elapsed = time.time() - start_time
elapsed = time.perf_counter() - start_time
print('-' * 20)
msg = '{} flag{} downloaded.'
plural = 's' if counter[HTTPStatus.ok] != 1 else ''
print(msg.format(counter[HTTPStatus.ok], plural))
print(f'{counter[HTTPStatus.ok]} flag{plural} downloaded.')
if counter[HTTPStatus.not_found]:
print(counter[HTTPStatus.not_found], 'not found.')
print(f'{counter[HTTPStatus.not_found]} not found.')
if counter[HTTPStatus.error]:
plural = 's' if counter[HTTPStatus.error] != 1 else ''
print('{} error{}.'.format(counter[HTTPStatus.error], plural))
print('Elapsed time: {:.2f}s'.format(elapsed))
print(f'{counter[HTTPStatus.error]} error{plural}')
print(f'Elapsed time: {elapsed:.2f}s')
def expand_cc_args(every_cc: bool,
@ -75,22 +69,21 @@ def expand_cc_args(every_cc: bool,
cc_args: list[str],
limit: int) -> list[str]:
codes: set[str] = set()
A_Z = string.ascii_uppercase
A_Z = set(string.ascii_uppercase)
if every_cc:
codes.update(a+b for a in A_Z for b in A_Z)
codes.update(f'{a}{b}' for a in A_Z for b in A_Z)
elif all_cc:
with open(COUNTRY_CODES_FILE) as fp:
text = fp.read()
text = COUNTRY_CODES_FILE.read_text()
codes.update(text.split())
else:
for cc in (c.upper() for c in cc_args):
if len(cc) == 1 and cc in A_Z:
codes.update(cc+c for c in A_Z)
codes.update(cc + c for c in A_Z)
elif len(cc) == 2 and all(c in A_Z for c in cc):
codes.add(cc)
else:
msg = 'each CC argument must be A to Z or AA to ZZ.'
raise ValueError('*** Usage error: '+msg)
raise ValueError('*** Usage error: each CC argument '
'must be A to Z or AA to ZZ.')
return sorted(codes)[:limit]
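
The -a/--all branch above also moves from open()/read() to Path.read_text(), which opens, reads and closes in one call. A tiny sketch with a throwaway file name (it writes a sample file just so the snippet runs):

from pathlib import Path

codes_file = Path('sample_codes.txt')         # throwaway example file
codes_file.write_text('BR CN IN\nUS RU\n')

# before
with open(codes_file) as fp:
    text = fp.read()

# after
assert text == codes_file.read_text()
print(sorted(text.split()))                   # -> ['BR', 'CN', 'IN', 'RU', 'US']
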
@ -99,22 +92,28 @@ def process_args(default_concur_req):
parser = argparse.ArgumentParser(
description='Download flags for country codes. '
'Default: top 20 countries by population.')
parser.add_argument('cc', metavar='CC', nargs='*',
parser.add_argument(
'cc', metavar='CC', nargs='*',
help='country code or 1st letter (eg. B for BA...BZ)')
parser.add_argument('-a', '--all', action='store_true',
parser.add_argument(
'-a', '--all', action='store_true',
help='get all available flags (AD to ZW)')
parser.add_argument('-e', '--every', action='store_true',
parser.add_argument(
'-e', '--every', action='store_true',
help='get flags for every possible code (AA...ZZ)')
parser.add_argument('-l', '--limit', metavar='N', type=int,
help='limit to N first codes', default=sys.maxsize)
parser.add_argument('-m', '--max_req', metavar='CONCURRENT', type=int,
parser.add_argument(
'-l', '--limit', metavar='N', type=int, help='limit to N first codes',
default=sys.maxsize)
parser.add_argument(
'-m', '--max_req', metavar='CONCURRENT', type=int,
default=default_concur_req,
help=f'maximum concurrent requests (default={default_concur_req})')
parser.add_argument('-s', '--server', metavar='LABEL',
default=DEFAULT_SERVER,
help=('Server to hit; one of ' +
f'{server_options} (default={DEFAULT_SERVER})'))
parser.add_argument('-v', '--verbose', action='store_true',
parser.add_argument(
'-s', '--server', metavar='LABEL', default=DEFAULT_SERVER,
help=f'Server to hit; one of {server_options} '
f'(default={DEFAULT_SERVER})')
parser.add_argument(
'-v', '--verbose', action='store_true',
help='output detailed progress info')
args = parser.parse_args()
if args.max_req < 1:
@ -127,8 +126,8 @@ def process_args(default_concur_req):
sys.exit(1)
args.server = args.server.upper()
if args.server not in SERVERS:
print('*** Usage error: --server LABEL must be one of',
server_options)
print(f'*** Usage error: --server LABEL '
f'must be one of {server_options}')
parser.print_usage()
sys.exit(1)
try:
@ -148,8 +147,9 @@ def main(download_many, default_concur_req, max_concur_req):
actual_req = min(args.max_req, max_concur_req, len(cc_list))
initial_report(cc_list, actual_req, args.server)
base_url = SERVERS[args.server]
t0 = time.time()
t0 = time.perf_counter()
counter = download_many(cc_list, base_url, args.verbose, actual_req)
assert sum(counter.values()) == len(cc_list), \
assert sum(counter.values()) == len(cc_list), (
'some downloads are unaccounted for'
)
final_report(cc_list, counter, t0)
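
Besides the f-strings, this file switches elapsed-time measurement from time.time() to time.perf_counter(). perf_counter() is monotonic and has the highest available resolution, so the difference between two calls is reliable even if the system clock is adjusted; its absolute value has no meaning on its own. A minimal sketch (the sleep stands in for the real download work):

import time

t0 = time.perf_counter()
time.sleep(0.1)                          # stand-in for the downloads
elapsed = time.perf_counter() - t0
print(f'Elapsed time: {elapsed:.2f}s')   # roughly 0.10s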

View File

@ -24,7 +24,6 @@ import tqdm # type: ignore
from flags2_common import main, save_flag, HTTPStatus, Result
DEFAULT_CONCUR_REQ = 1
MAX_CONCUR_REQ = 1

View File

@ -50,8 +50,8 @@ def download_many(cc_list: list[str],
try:
res = future.result() # <14>
except requests.exceptions.HTTPError as exc: # <15>
error_msg = 'HTTP {res.status_code} - {res.reason}'
error_msg = error_msg.format(res=exc.response)
error_fmt = 'HTTP {res.status_code} - {res.reason}'
error_msg = error_fmt.format(res=exc.response)
except requests.exceptions.ConnectionError:
error_msg = 'Connection error'
else:
@ -63,7 +63,7 @@ def download_many(cc_list: list[str],
counter[status] += 1
if verbose and error_msg:
cc = to_do_map[future] # <16>
print('*** Error for {}: {}'.format(cc, error_msg))
print(f'*** Error for {cc}: {error_msg}')
return counter
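
This is the one place the commit keeps str.format(): the template uses attribute lookups on a single bound name ({res.status_code}, res=exc.response), and the rename to error_fmt simply stops reusing error_msg for both the template and the result. An f-string equivalent is possible by spelling the lookups out; the sketch below uses a dummy response object rather than a real requests exception:

class FakeResponse:                       # stand-in for exc.response
    status_code = 404
    reason = 'NOT FOUND'

response = FakeResponse()

error_fmt = 'HTTP {res.status_code} - {res.reason}'    # what the commit keeps
error_msg = error_fmt.format(res=response)

assert error_msg == f'HTTP {response.status_code} - {response.reason}'
print(error_msg)                          # -> HTTP 404 - NOT FOUND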

View File

@ -18,7 +18,7 @@ from aiohttp import ClientSession # <1>
from flags import BASE_URL, save_flag, main # <2>
async def get_flag(session: ClientSession ,cc: str) -> bytes: # <3>
async def get_flag(session: ClientSession, cc: str) -> bytes: # <3>
cc = cc.lower()
url = f'{BASE_URL}/{cc}/{cc}.gif'
async with session.get(url) as resp: # <4>
@ -36,7 +36,6 @@ async def supervisor(cc_list):
to_do = [download_one(session, cc)
for cc in sorted(cc_list)] # <8>
res = await asyncio.gather(*to_do) # <9>
return len(res)
def download_many(cc_list):
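
For reference, the shape of the supervisor()/gather() pattern this hunk touches, reduced to something runnable without aiohttp: one coroutine per item, run concurrently with asyncio.gather(), results returned in submission order. download_one here is a dummy stand-in, not the coroutine above.

import asyncio

async def download_one(cc: str) -> str:
    await asyncio.sleep(0)                     # pretend to do network I/O
    return cc

async def supervisor(cc_list):
    to_do = [download_one(cc) for cc in sorted(cc_list)]
    res = await asyncio.gather(*to_do)         # results in submission order
    return len(res)

print(asyncio.run(supervisor(['BR', 'CN', 'IN'])))   # -> 3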

View File

@ -6,7 +6,7 @@ ThreadPoolExecutor example with ``as_completed``.
"""
from concurrent import futures
from flags import save_flag, main
from flags import main
from flags_threadpool import download_one
@ -18,19 +18,14 @@ def download_many(cc_list: list[str]) -> int:
for cc in sorted(cc_list): # <3>
future = executor.submit(download_one, cc) # <4>
to_do.append(future) # <5>
msg = 'Scheduled for {}: {}'
print(msg.format(cc, future)) # <6>
print(f'Scheduled for {cc}: {future}') # <6>
count = 0
for future in futures.as_completed(to_do): # <7>
for count, future in enumerate(futures.as_completed(to_do), 1): # <7>
res: str = future.result() # <8>
msg = '{} result: {!r}'
print(msg.format(future, res)) # <9>
count += 1
print(f'{future} result: {res!r}') # <9>
return count
# end::FLAGS_THREADPOOL_AS_COMPLETED[]
if __name__ == '__main__':
main(download_many)
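
The rewrite above replaces the manual count = 0 / count += 1 pair with enumerate() over as_completed(); starting the count at 1 keeps the returned total equal to the number of completed futures. A self-contained sketch of the same idiom, with a throwaway squared() task instead of download_one():

from concurrent import futures

def squared(n: int) -> int:                   # throwaway task
    return n * n

count = 0
with futures.ThreadPoolExecutor() as executor:
    to_do = [executor.submit(squared, n) for n in range(5)]
    for count, future in enumerate(futures.as_completed(to_do), 1):
        print(f'{future} result: {future.result()!r}')

print(count)                                  # -> 5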

View File

@ -8,14 +8,14 @@ GET responses, and optionally returns errors to a fraction of
the requests if given the --error_rate command-line argument.
"""
import time
import contextlib
import os
import socket
import contextlib
import time
from functools import partial
from random import random
from http import server, HTTPStatus
from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler
from random import random
class SlowHTTPRequestHandler(SimpleHTTPRequestHandler):
@ -52,7 +52,6 @@ class SlowHTTPRequestHandler(SimpleHTTPRequestHandler):
# https://github.com/python/cpython/blob/master/Lib/http/server.py
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()

View File

@ -6,10 +6,10 @@ but using `concurrent.futures.ProcessPoolExecutor`.
"""
# tag::PRIMES_POOL[]
import sys
from concurrent import futures # <1>
from time import perf_counter
from typing import NamedTuple
from concurrent import futures # <1>
import sys
from primes import is_prime, NUMBERS
@ -43,7 +43,7 @@ def main() -> None:
print(f'{n:16} {label} {elapsed:9.6f}s')
time = perf_counter() - t0
print('Total time:', f'{time:0.2f}s')
print(f'Total time: {time:.2f}s')
if __name__ == '__main__':
main()
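
The docstring above mentions concurrent.futures.ProcessPoolExecutor; a generic sketch of that API for context (not the structure of this file): each call runs in a separate worker process, and executor.map() yields results in submission order. is_odd() is a trivial stand-in for is_prime().

from concurrent import futures
from time import perf_counter

def is_odd(n: int) -> bool:                   # trivial stand-in for is_prime()
    return n % 2 == 1

def main() -> None:
    t0 = perf_counter()
    numbers = [2, 3, 5, 8, 13]
    with futures.ProcessPoolExecutor() as executor:   # one worker per CPU by default
        for n, result in zip(numbers, executor.map(is_odd, numbers)):
            print(f'{n:16} {result}')
    print(f'Total time: {perf_counter() - t0:.2f}s')

if __name__ == '__main__':                    # needed for process pools on spawn platforms
    main()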