File tree Expand file tree Collapse file tree 10 files changed +252
-0
lines changed
Expand file tree Collapse file tree 10 files changed +252
-0
lines changed Original file line number Diff line number Diff line change 1+ # Speed Up Your Python Program With Concurrency: Code Examples
2+
Corresponding code to the Real Python tutorial, "[Speed Up Your Python Program With Concurrency](https://realpython.com/python-concurrency-overview/)."
4+
5+ To run the code here, use:
6+
7+ pip install -r requirements.txt
8+
9+ This will ensure you have the required packages.
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import multiprocessing
3+ import time
4+
5+
def cpu_bound(number):
    """Pure-CPU workload: return the sum of squares 0**2 + ... + (number-1)**2."""
    return sum(i ** 2 for i in range(number))
8+
9+
def find_sums(numbers):
    """Compute cpu_bound for every number, fanned out across one worker
    process per CPU core; results are discarded (only the timing matters)."""
    with multiprocessing.Pool() as process_pool:
        process_pool.map(cpu_bound, numbers)
13+
14+
if __name__ == "__main__":
    numbers = [5_000_000 + x for x in range(20)]

    # time.perf_counter() is the recommended clock for measuring elapsed
    # time: monotonic and high-resolution, unlike wall-clock time.time().
    start_time = time.perf_counter()
    find_sums(numbers)
    duration = time.perf_counter() - start_time
    print(f"Duration {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import time
3+
4+
def cpu_bound(number):
    """Pure-CPU workload: sum of the squares of 0..number-1."""
    total = 0
    for value in range(number):
        total += value * value
    return total
7+
8+
def find_sums(numbers):
    """Run cpu_bound on each value back to back on a single core
    (the non-concurrent baseline; results are discarded)."""
    for value in numbers:
        cpu_bound(value)
12+
13+
if __name__ == "__main__":
    numbers = [5_000_000 + x for x in range(20)]

    # time.perf_counter() is the recommended clock for measuring elapsed
    # time: monotonic and high-resolution, unlike wall-clock time.time().
    start_time = time.perf_counter()
    find_sums(numbers)
    duration = time.perf_counter() - start_time
    print(f"Duration {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import concurrent .futures
3+ import time
4+
5+
def cpu_bound(number):
    """Pure-CPU workload: add up the squares of every integer below number."""
    squares = (n * n for n in range(number))
    return sum(squares)
8+
9+
def find_sums(numbers):
    """Spread cpu_bound over a thread pool.

    This is the tutorial's deliberate counter-example: for CPU-bound work the
    GIL keeps the threads on one core, so this is no faster than sequential.
    """
    pool = concurrent.futures.ThreadPoolExecutor(max_workers=5)
    with pool as executor:
        executor.map(cpu_bound, numbers)
13+
14+
if __name__ == "__main__":
    numbers = [5_000_000 + x for x in range(20)]

    # time.perf_counter() is the recommended clock for measuring elapsed
    # time: monotonic and high-resolution, unlike wall-clock time.time().
    start_time = time.perf_counter()
    find_sums(numbers)
    duration = time.perf_counter() - start_time
    print(f"Duration {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import asyncio
3+ import time
4+ import aiohttp
5+
6+
async def download_site(session, url):
    """Fetch one URL with the shared aiohttp session and report its
    Content-Length (f-string for consistency with the sibling scripts)."""
    async with session.get(url) as response:
        print(f"Read {response.content_length} from {url}")
10+
11+
async def download_all_sites(sites):
    """Download every site concurrently over one shared aiohttp session."""
    async with aiohttp.ClientSession() as session:
        # asyncio.create_task (3.7+) supersedes the older
        # asyncio.ensure_future spelling for scheduling coroutines.
        tasks = [asyncio.create_task(download_site(session, url)) for url in sites]
        # return_exceptions=True: one failed download must not cancel the rest.
        await asyncio.gather(*tasks, return_exceptions=True)
19+
20+
if __name__ == "__main__":
    sites = [
        "http://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    # asyncio.run() (3.7+) replaces the deprecated
    # get_event_loop().run_until_complete() pattern and closes the loop for us.
    # perf_counter() is monotonic, the right clock for elapsed-time measurement.
    start_time = time.perf_counter()
    asyncio.run(download_all_sites(sites))
    duration = time.perf_counter() - start_time
    print(f"Downloaded {len(sites)} sites in {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import requests
3+ import multiprocessing
4+ import time
5+
# One requests.Session per worker process; stays None until the Pool's
# initializer (set_global_session) populates it in each child.
session = None
7+
8+
def set_global_session():
    """Pool initializer: give this worker process its own requests.Session."""
    global session
    if session is None:
        session = requests.Session()
13+
14+
def download_site(url):
    """Fetch one URL with the process-local session and report the byte count,
    tagged with the worker process's name."""
    with session.get(url) as response:
        worker = multiprocessing.current_process().name
        print(f"{worker}:Read {len(response.content)} from {url}")
19+
20+
def download_all_sites(sites):
    """Download every site using a pool of worker processes, each of which
    gets its own session via the initializer hook."""
    with multiprocessing.Pool(initializer=set_global_session) as process_pool:
        process_pool.map(download_site, sites)
24+
25+
if __name__ == "__main__":
    sites = [
        "http://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    # perf_counter() is monotonic and high-resolution — the recommended
    # clock for measuring elapsed time (time.time() can jump).
    start_time = time.perf_counter()
    download_all_sites(sites)
    duration = time.perf_counter() - start_time
    print(f"Downloaded {len(sites)} in {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import requests
3+ import time
4+
5+
def download_site(url, session):
    """GET one URL through the shared session and report the byte count."""
    with session.get(url) as response:
        body = response.content
        print(f"Read {len(body)} from {url}")
9+
10+
def download_all_sites(sites):
    """Download every site sequentially over a single shared Session
    (the non-concurrent baseline)."""
    with requests.Session() as shared_session:
        for site in sites:
            download_site(site, shared_session)
15+
16+
if __name__ == "__main__":
    sites = [
        "http://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    # perf_counter() is monotonic and high-resolution — the recommended
    # clock for measuring elapsed time (time.time() can jump).
    start_time = time.perf_counter()
    download_all_sites(sites)
    duration = time.perf_counter() - start_time
    print(f"Downloaded {len(sites)} in {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import concurrent .futures
3+ import requests
4+ import threading
5+ import time
6+
7+
# Per-thread storage: get_session() lazily stashes one requests.Session
# per worker thread here, so threads never share a session.
thread_local = threading.local()
9+
10+
def get_session():
    """Return the calling thread's requests.Session, creating it on first use."""
    session = getattr(thread_local, "session", None)
    if session is None:
        session = requests.Session()
        thread_local.session = session
    return session
15+
16+
def download_site(url):
    """Fetch one URL with the calling thread's session and report its size."""
    with get_session().get(url) as response:
        print(f"Read {len(response.content)} from {url}")
21+
22+
def download_all_sites(sites):
    """Download all sites concurrently on a small pool of threads
    (I/O-bound work overlaps while each thread waits on the network)."""
    pool = concurrent.futures.ThreadPoolExecutor(max_workers=5)
    with pool as executor:
        executor.map(download_site, sites)
26+
27+
if __name__ == "__main__":
    sites = [
        "http://www.jython.org",
        "http://olympus.realpython.org/dice",
    ] * 80
    # perf_counter() is monotonic and high-resolution — the recommended
    # clock for measuring elapsed time (time.time() can jump).
    start_time = time.perf_counter()
    download_all_sites(sites)
    duration = time.perf_counter() - start_time
    print(f"Downloaded {len(sites)} in {duration} seconds")
Original file line number Diff line number Diff line change 1+ #!/usr/bin/env python3
2+ import concurrent .futures
3+
4+
# Shared mutable state incremented from many threads with no lock —
# presumably intentional, as this script demonstrates a race condition.
counter = 0
6+
7+
def increment_counter(fake_value):
    """Bump the global counter 100 times.

    NOTE(review): the read-modify-write on `counter` has no lock, so lost
    updates are possible under threads — apparently the point of this demo.
    `fake_value` is unused; it only exists so executor.map has an iterable.
    """
    global counter
    for _ in range(100):
        counter += 1
12+
13+
if __name__ == "__main__":
    # list(range(...)) instead of the copying comprehension [x for x in range(...)].
    fake_data = list(range(5000))
    counter = 0
    # Many threads hammer the unsynchronized counter; lost updates mean the
    # final value can come out below the 5000 * 100 an atomic version would
    # give. The missing lock is left in deliberately — it IS the demo.
    with concurrent.futures.ThreadPoolExecutor(max_workers=5000) as executor:
        executor.map(increment_counter, fake_data)
Original file line number Diff line number Diff line change 1+ aiohttp == 3.4.4
2+ asks == 2.0.0
3+ astroid == 2.0.4
4+ async-generator == 1.10
5+ async-timeout == 3.0.1
6+ atomicwrites == 1.2.0
7+ attrs == 18.1.0
8+ certifi == 2018.8.13
9+ chardet == 3.0.4
10+ contextvars == 2.3
11+ flake8 == 3.5.0
12+ h11 == 0.8.1
13+ idna == 2.7
14+ immutables == 0.6
15+ isort == 4.3.4
16+ lazy-object-proxy == 1.3.1
17+ mccabe == 0.6.1
18+ more-itertools == 4.3.0
19+ multidict == 4.4.2
20+ multio == 0.2.3
21+ outcome == 0.1.0
22+ pathlib2 == 2.3.2
# pkg-resources==0.0.0 removed: a spurious Ubuntu/Debian "pip freeze" artifact
# that does not exist on PyPI and makes "pip install -r requirements.txt" fail.
24+ pluggy == 0.7.1
25+ py == 1.6.0
26+ pycodestyle == 2.3.1
27+ pyflakes == 1.6.0
28+ pylint == 2.1.1
29+ pytest == 3.7.3
30+ requests == 2.19.1
31+ six == 1.11.0
32+ sniffio == 1.0.0
33+ sortedcontainers == 2.0.4
34+ tqdm == 4.25.0
35+ trio == 0.6.0
36+ typed-ast == 1.1.0
37+ urllib3 == 1.23
38+ wrapt == 1.10.11
39+ yarl == 1.2.6
You can’t perform that action at this time.
0 commit comments