Unfortunately, as far as I know, the `requests` library is not equipped for performing asynchronous requests. You can wrap `async/await` syntax around `requests`, but that will make the underlying requests no less synchronous. If you want true async requests, you must use other tooling that provides it. One such solution is `aiohttp` (Python 3.5.3+). It works well in my experience using it with the Python 3.7 `async/await` syntax.
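For readers who just want the core pattern, a single request with `aiohttp` looks roughly like this (a minimal sketch: `asyncio.run` requires Python 3.7+, and the echo URL is only a placeholder):

```python
import asyncio

import aiohttp


async def main():
    # one ClientSession per application; it manages the connection pool
    async with aiohttp.ClientSession() as session:
        async with session.get('https://postman-echo.com/get') as response:
            print(await response.json())


asyncio.run(main())
```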
Below I write three implementations of performing `n` web requests using:

- purely synchronous requests (`sync_requests_get_all()`) using the Python `requests` library
- synchronous requests (`async_requests_get_all()`) using the Python `requests` library, wrapped in Python 3.7 `async/await` syntax and `asyncio`
- a truly asynchronous implementation (`async_aiohttp_get_all()`) with the Python `aiohttp` library, wrapped in Python 3.7 `async/await` syntax and `asyncio`
```python
import time
import asyncio
import requests
import aiohttp

from types import SimpleNamespace

durations = []


def timed(func):
    """
    records approximate durations of function calls
    """
    def wrapper(*args, **kwargs):
        start = time.time()
        print(f'{func.__name__:<30} started')
        result = func(*args, **kwargs)
        duration = f'{func.__name__:<30} finished in {time.time() - start:.2f} seconds'
        print(duration)
        durations.append(duration)
        return result
    return wrapper


async def fetch(url, session):
    """
    asynchronous get request
    """
    async with session.get(url) as response:
        response_json = await response.json()
        return SimpleNamespace(**response_json)


async def fetch_many(loop, urls):
    """
    many asynchronous get requests, gathered
    """
    async with aiohttp.ClientSession() as session:
        # schedule all requests up front so they are in flight at once
        tasks = [loop.create_task(fetch(url, session)) for url in urls]
        return await asyncio.gather(*tasks)


@timed
def sync_requests_get_all(urls):
    """
    performs synchronous get requests
    """
    # use session to reduce network overhead
    session = requests.Session()
    return [SimpleNamespace(**session.get(url).json()) for url in urls]


@timed
def async_requests_get_all(urls):
    """
    asynchronous wrapper around synchronous requests
    """
    loop = asyncio.get_event_loop()
    # use session to reduce network overhead
    session = requests.Session()

    async def async_get(url):
        # session.get blocks and never awaits anything, so the event
        # loop cannot switch to another task while it runs
        return session.get(url)

    async_tasks = [loop.create_task(async_get(url)) for url in urls]
    return loop.run_until_complete(asyncio.gather(*async_tasks))


@timed
def async_aiohttp_get_all(urls):
    """
    performs asynchronous get requests
    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(fetch_many(loop, urls))


if __name__ == '__main__':
    # this endpoint takes ~3 seconds to respond,
    # so a purely synchronous implementation should take
    # little more than 30 seconds and a purely asynchronous
    # implementation should take little more than 3 seconds.
    urls = ['https://postman-echo.com/delay/3'] * 10

    sync_requests_get_all(urls)
    async_requests_get_all(urls)
    async_aiohttp_get_all(urls)

    print('----------------------')
    for duration in durations:
        print(duration)
```
On my machine, this is the output:
```
sync_requests_get_all          started
sync_requests_get_all          finished in 30.92 seconds
async_requests_get_all         started
async_requests_get_all         finished in 30.87 seconds
async_aiohttp_get_all          started
async_aiohttp_get_all          finished in 3.22 seconds
----------------------
sync_requests_get_all          finished in 30.92 seconds
async_requests_get_all         finished in 30.87 seconds
async_aiohttp_get_all          finished in 3.22 seconds
```
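Note that `async_requests_get_all` is essentially as slow as the plain synchronous version: `session.get` is a blocking call that never yields control back to the event loop, so the coroutines still run one after another. If you are stuck with `requests` but need concurrency, threads are one way to get it. Below is a minimal sketch using `concurrent.futures.ThreadPoolExecutor`; the function name and `max_workers` value are illustrative, not part of the script above, and this is thread-based concurrency rather than true async I/O:

```python
from concurrent.futures import ThreadPoolExecutor

import requests


def threaded_requests_get_all(urls, max_workers=10):
    """
    concurrent get requests via requests + a thread pool
    """
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # executor.map issues up to max_workers requests at a time;
        # the blocking calls overlap in separate threads
        responses = executor.map(requests.get, urls)
        return [response.json() for response in responses]


if __name__ == '__main__':
    results = threaded_requests_get_all(['https://postman-echo.com/delay/3'] * 10)
    print(f'fetched {len(results)} responses')
```

A thread pool won't scale to thousands of in-flight connections the way `aiohttp` can, but for a modest number of requests it avoids adding a dependency. One last note: `asyncio.get_event_loop()` in the benchmark script reflects the Python 3.7 idiom; newer Python versions deprecate calling it outside a running loop, and `asyncio.run(...)` is the usual entry point today.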