I'm trying to iteratively create URLs by combining two URL templates with a list of unique UUIDs, where each UUID is inserted into the URL like so: .com/{test_adv_uuid}/. My current code looks like this:
import logging
import requests
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
order_id = 1
order_amount = 10
test_adv_uuid = [
    f'08d79951-1908-4f0f-91aa-40eee5ac0e3f',
    f'30816356-4be2-4def-9793-2913dc7dae82',
    f'40a9420a-3bad-4778-930d-0e3da35355d1',
    f'9c2c4477-21ea-4b90-b72b-db40c8ae9754',
    f'6f0fa70b-4914-458d-8b02-0b6a4475773f',
    f'9614bd9f-afa0-4d93-b709-beb38c99cd66',
]
test_adv_url_body = f'https://listen.whatever.com/{test_adv_uuid}/pixel.png'
test_adv_url_attr = f'https://listen.whatever.com/{test_adv_uuid}/pixel.png?order={order_id}&value={order_amount}'
# look up thread pool stuff for understanding on the download & logging
THREAD_POOL = 16
session = requests.Session()
session.mount(
    'https://',
    requests.adapters.HTTPAdapter(pool_maxsize=THREAD_POOL,
                                  max_retries=3,
                                  pool_block=True)
)
def get(url):
    response = session.get(url)
    logging.info("request was completed in %s seconds [%s]", response.elapsed.total_seconds(), response.url)
    if response.status_code != 200:
        logging.error("request failed, error code %s [%s]", response.status_code, response.url)
        if 500 <= response.status_code < 600:
            # server is overloaded? give it a break
            time.sleep(5)
    return response
def download(urls):
    with ThreadPoolExecutor(max_workers=THREAD_POOL) as executor:
        # wrap in a list() to wait for all requests to complete
        for response in list(executor.map(get, urls)):
            if response.status_code == 200:
                print(response.content)
def main():
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    urls = [
        test_adv_url_body,
        test_adv_url_attr
    ]
    download(urls)

if __name__ == "__main__":
    main()
The output I get combines all the uuid items in the list into a single url and looks like:
request was completed in 0.286232 seconds [https://listen.whatever.com/['08d79951-1908-4f0f-91aa-40eee5ac0e3f', '30816356-4be2-4def-9793-2913dc7dae82', '40a9420a-3bad-4778-930d-0e3da35355d1', '9c2c4477-21ea-4b90-b72b-db40c8ae9754', '6f0fa70b-4914-458d-8b02-0b6a4475773f', '9614bd9f-afa0-4d93-b709-beb38c99cd66']/pixel.png?order=1&value=10]
How would I go about refactoring this so that each URL, with its dynamic UUID, is built and sent as a separate request?
Should I iteratively create the URLs in a list, or build them within the request call itself?
CodePudding user response:
try this. Your f-strings interpolate the whole test_adv_uuid list into each URL, so you want to build one URL per UUID instead:
# better style to name your lists with plurals
test_adv_uuids = [
    '08d79951-1908-4f0f-91aa-40eee5ac0e3f',
    '30816356-4be2-4def-9793-2913dc7dae82',
    '40a9420a-3bad-4778-930d-0e3da35355d1',
    '9c2c4477-21ea-4b90-b72b-db40c8ae9754',
    '6f0fa70b-4914-458d-8b02-0b6a4475773f',
    '9614bd9f-afa0-4d93-b709-beb38c99cd66',
]
# use a list comprehension to build the list of urls
test_adv_url_bodies = [f'https://listen.whatever.com/{uuid}/pixel.png' for uuid in test_adv_uuids]
test_adv_url_attrs = [f'https://listen.whatever.com/{uuid}/pixel.png?order={order_id}&value={order_amount}' for uuid in test_adv_uuids]
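Each entry now contains a single UUID, e.g. https://listen.whatever.com/08d79951-1908-4f0f-91aa-40eee5ac0e3f/pixel.png for the first one.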
and then in your main()
you'd have
urls = test_adv_url_bodies + test_adv_url_attrs
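Putting that together, a minimal sketch of the revised main(), assuming the get() and download() helpers and the logging setup from your question are unchanged:

def main():
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)-8s %(message)s',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    # two urls per uuid: the bare pixel and the attributed variant
    urls = test_adv_url_bodies + test_adv_url_attrs
    download(urls)

With your six UUIDs this sends twelve requests in total, each URL going through get() as its own request via the thread pool.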
this is technically slightly less efficient than building the urls list in a single for loop, since the two comprehensions iterate over test_adv_uuids twice; a sketch of the single-loop version is below
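A minimal sketch of that single-loop version, using the same names as above:

# build both url variants in one pass over the uuids
test_adv_urls = []
for uuid in test_adv_uuids:
    test_adv_urls.append(f'https://listen.whatever.com/{uuid}/pixel.png')
    test_adv_urls.append(f'https://listen.whatever.com/{uuid}/pixel.png?order={order_id}&value={order_amount}')

For six UUIDs the difference is negligible either way; pick whichever reads better to you.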