Let's say I have some tasks I want to run asynchronously.
I want to make a series of 4 API calls asynchronously, but I would also like to have another asynchronous function checking on the status of my session.
Using asyncio
I have tried:
import aiohttp
import asyncio
async def make_request():
    """Open a throwaway session, GET httpbin, report status/body, return the response."""
    async with aiohttp.ClientSession() as session:
        async with session.get('http://httpbin.org/get') as resp:
            print(resp.status)
            body = await resp.text()
            print(body)
            return resp
async def say_hello():
    """Emit a single greeting to stdout."""
    greeting = "Hello"
    print(greeting)
async def main():
    """Run the four API calls and the greeting coroutine concurrently."""
    request_tasks = [asyncio.create_task(make_request()) for _ in range(4)]
    results = await asyncio.gather(*request_tasks, say_hello())
if __name__ == "__main__":
    # asyncio.run() creates, runs, and closes the event loop for us.
    # get_event_loop()/run_until_complete() is the legacy pattern and is
    # deprecated for this use since Python 3.10.
    asyncio.run(main())
In reality, my say_hello()
is checking a status and updating a class attribute (the session) while requests are being made, at a set time frequency. How could I run this "status" function alongside the actual API calls executed in make_request(),
which return response objects that I later process once finished?
CodePudding user response:
I think you should structure your code a little differently. Let's add a while True:
loop in say_hello()
and put await asyncio.sleep(N)
there. Move say_hello()
out of asyncio.gather
:
import aiohttp
import asyncio
async def make_request():
    """GET httpbin once, read the body, linger 2 s, then hand back the response."""
    async with aiohttp.ClientSession() as http:
        async with http.get("http://httpbin.org/get") as response:
            _ = response.status  # status available here if needed
            await response.text()
            await asyncio.sleep(2)  # sleep here artificially
            return response
async def say_hello():
    """Background heartbeat: once per second, update session state and report.

    Runs forever; cancel the task to stop it.
    """
    while True:
        # place the session-refresh logic here
        # ...
        await asyncio.sleep(1.0)
        print("Hello")
async def main():
    """Run four API calls concurrently while a background status loop ticks.

    Fix vs. the original: the say_hello() task reference was dropped.  The
    event loop only keeps a *weak* reference to tasks, so an un-referenced
    task may be garbage-collected before it finishes; we also cancel it on
    the way out so it is not left pending when the loop shuts down.
    """
    tasks = [asyncio.create_task(make_request()) for _ in range(4)]
    status_task = asyncio.create_task(say_hello())  # keep a strong reference
    results = await asyncio.gather(
        *tasks,
    )
    status_task.cancel()  # stop the infinite loop cleanly
    print(results)
if __name__ == "__main__":
    # asyncio.run() creates, runs, and closes the event loop for us.
    # get_event_loop()/run_until_complete() is the legacy pattern and is
    # deprecated for this use since Python 3.10.
    asyncio.run(main())
EDIT: Here is an example with a global session:
import aiohttp
import asyncio
session = None  # shared aiohttp.ClientSession; created in main() before any request runs
headers = {"MySessionHeader": "0"}  # mutable dict shared between make_request() and update_headers()
# limit concurrency of connections to 2
# NOTE(review): on Python < 3.10, creating an asyncio.Semaphore outside a
# running event loop may bind it to the wrong loop — consider creating it
# inside main(); confirm against the target Python version.
sem = asyncio.Semaphore(2)
async def make_request():
    """Acquire the shared semaphore, GET httpbin with the shared headers, and return the response."""
    async with sem:
        async with session.get(
            "http://httpbin.org/get", headers=headers
        ) as response:
            _ = response.status
            body = await response.text()
            print(body)
            await asyncio.sleep(2)  # sleep here artificially
            return response
async def update_headers():
    """Once per second, bump the shared MySessionHeader value.

    Runs forever; cancel the task to stop it.

    Bug fix: the original reset `count = 1` on every iteration, so the
    header was frozen at "1" forever.  It must be incremented.
    """
    count = 1
    while True:
        await asyncio.sleep(1)
        # update headers of session:
        # eg. update cookies/headers
        headers["MySessionHeader"] = str(count)
        count += 1  # was `count = 1` — the header never changed
async def main():
    """Create the shared session, run 24 rate-limited requests alongside the
    header-updater task, then clean everything up.

    Fixes vs. the original:
    - the update_headers() task reference was dropped; the event loop holds
      tasks only weakly, so an un-referenced task can be garbage-collected
      mid-run.  Keep a strong reference and cancel it when done so it is
      not left pending at shutdown.
    - session.close() now runs in a `finally` block so connections are
      released even if gather() raises.
    """
    global session
    session = aiohttp.ClientSession()
    updater = asyncio.create_task(update_headers())  # keep a strong reference
    try:
        tasks = [asyncio.create_task(make_request()) for _ in range(24)]
        results = await asyncio.gather(
            *tasks,
        )
        print(results)
    finally:
        updater.cancel()  # stop the infinite loop cleanly
        await session.close()  # always release the connection pool
if __name__ == "__main__":
    # asyncio.run() creates, runs, and closes the event loop for us.
    # get_event_loop()/run_until_complete() is the legacy pattern and is
    # deprecated for this use since Python 3.10.
    asyncio.run(main())