@@ -1,13 +1,17 @@
 import asyncio
 import os
+import sys
 import time
+from concurrent.futures import ThreadPoolExecutor
 from contextlib import contextmanager
-from typing import Any, Coroutine, Iterator, List
+from typing import Any, Callable, Coroutine, Iterator, List
 
 import aiohttp
 import matplotlib.pyplot as plt  # type: ignore[import-untyped]
 import pyinstrument
+import requests  # type: ignore[import-untyped]
 from matplotlib.axes import Axes  # type: ignore[import-untyped]
+from requests.adapters import HTTPAdapter  # type: ignore[import-untyped]
 
 import httpcore
 
@@ -35,19 +39,18 @@ def profile():
     profiler.open_in_browser()
 
 
-async def gather_limited_concurrency(
-    coros: Iterator[Coroutine[Any, Any, Any]], concurrency: int = CONCURRENCY
-) -> None:
-    sem = asyncio.Semaphore(concurrency)
-
-    async def coro_with_sem(coro: Coroutine[Any, Any, Any]) -> None:
-        async with sem:
-            await coro
+async def run_async_requests(axis: Axes) -> None:
+    async def gather_limited_concurrency(
+        coros: Iterator[Coroutine[Any, Any, Any]], concurrency: int = CONCURRENCY
+    ) -> None:
+        sem = asyncio.Semaphore(concurrency)
 
-    await asyncio.gather(*(coro_with_sem(c) for c in coros))
+        async def coro_with_sem(coro: Coroutine[Any, Any, Any]) -> None:
+            async with sem:
+                await coro
 
+        await asyncio.gather(*(coro_with_sem(c) for c in coros))
 
-async def run_requests(axis: Axes) -> None:
     async def httpcore_get(
         pool: httpcore.AsyncConnectionPool, timings: List[int]
     ) -> None:
@@ -99,9 +102,85 @@ async def aiohttp_get(session: aiohttp.ClientSession, timings: List[int]) -> Non
         )
 
 
+def run_sync_requests(axis: Axes) -> None:
+    def run_in_executor(
+        fns: Iterator[Callable[[], None]], executor: ThreadPoolExecutor
+    ) -> None:
+        futures = [executor.submit(fn) for fn in fns]
+        for future in futures:
+            future.result()
+
+    def httpcore_get(pool: httpcore.ConnectionPool, timings: List[int]) -> None:
+        start = time.monotonic()
+        res = pool.request("GET", URL)
+        assert len(res.read()) == 2000
+        assert res.status == 200, f"status_code={res.status}"
+        timings.append(duration(start))
+
+    def requests_get(session: requests.Session, timings: List[int]) -> None:
+        start = time.monotonic()
+        res = session.get(URL)
+        assert len(res.text) == 2000
+        assert res.status_code == 200, f"status={res.status_code}"
+        timings.append(duration(start))
+
+    with httpcore.ConnectionPool(max_connections=POOL_LIMIT) as pool:
+        # warmup
+        with ThreadPoolExecutor(max_workers=CONCURRENCY * 2) as exec:
+            run_in_executor(
+                (lambda: httpcore_get(pool, []) for _ in range(REQUESTS)),
+                exec,
+            )
+
+        timings: List[int] = []
+        exec = ThreadPoolExecutor(max_workers=CONCURRENCY)
+        start = time.monotonic()
+        with profile():
+            for _ in range(REPEATS):
+                run_in_executor(
+                    (lambda: httpcore_get(pool, timings) for _ in range(REQUESTS)), exec
+                )
+        exec.shutdown(wait=True)
+        axis.plot(
+            [*range(len(timings))], timings, label=f"httpcore (tot={duration(start)}ms)"
+        )
+
+    with requests.Session() as session:
+        session.mount(
+            "http://", HTTPAdapter(pool_connections=POOL_LIMIT, pool_maxsize=POOL_LIMIT)
+        )
+        # warmup
+        with ThreadPoolExecutor(max_workers=CONCURRENCY * 2) as exec:
+            run_in_executor(
+                (lambda: requests_get(session, []) for _ in range(REQUESTS)),
+                exec,
+            )
+
+        timings = []
+        exec = ThreadPoolExecutor(max_workers=CONCURRENCY)
+        start = time.monotonic()
+        for _ in range(REPEATS):
+            run_in_executor(
+                (lambda: requests_get(session, timings) for _ in range(REQUESTS)),
+                exec,
+            )
+        exec.shutdown(wait=True)
+        axis.plot(
+            [*range(len(timings))], timings, label=f"requests (tot={duration(start)}ms)"
+        )
+
+
 def main() -> None:
+    mode = sys.argv[1] if len(sys.argv) == 2 else None
+    assert mode in ("async", "sync"), "Usage: python client.py <async|sync>"
+
     fig, ax = plt.subplots()
-    asyncio.run(run_requests(ax))
+
+    if mode == "async":
+        asyncio.run(run_async_requests(ax))
+    else:
+        run_sync_requests(ax)
+
     plt.legend(loc="upper left")
     ax.set_xlabel("# request")
     ax.set_ylabel("[ms]")
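For reference, a minimal self-contained sketch of the semaphore-bounded gather pattern that run_async_requests builds on. The demo coroutine, its sleep duration, and the CONCURRENCY value below are illustrative placeholders, not values from this commit.

import asyncio
from typing import Any, Coroutine, Iterator

CONCURRENCY = 5  # placeholder; the real script defines its own constants


async def gather_limited_concurrency(
    coros: Iterator[Coroutine[Any, Any, Any]], concurrency: int = CONCURRENCY
) -> None:
    # At most `concurrency` coroutines hold the semaphore at once,
    # so at most that many are awaited concurrently.
    sem = asyncio.Semaphore(concurrency)

    async def coro_with_sem(coro: Coroutine[Any, Any, Any]) -> None:
        async with sem:
            await coro

    await asyncio.gather(*(coro_with_sem(c) for c in coros))


async def demo() -> None:
    async def work(i: int) -> None:
        await asyncio.sleep(0.1)  # stand-in for one HTTP request
        print(f"done {i}")

    await gather_limited_concurrency(work(i) for i in range(20))


if __name__ == "__main__":
    asyncio.run(demo())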