# USE HANDS
"""Imagine want to grab 22 JSON structures of cats from this api ->
https://cataas.com/cat?json=true,
which returns a single random cat image JSON, without fetching more than 3
concurrently - that is, you can only have 3 concurrent HTTPS requests running at a
time, but you should
still optimize for speed.
The API returns a random JSON structure that represents a cute cat picture. The
problem is similar to one we run into all the time: Upload 10's of thousands of
images to AWS S3, for example, without exceeding the limits of the network on the
machine doing the uploading.
"""
import asyncio
import aiohttp
from pprint import pprint
async def fetch_cat():
    """Fetch one random cat JSON document from cataas.com.

    Returns:
        dict | None: the decoded JSON payload for a random cat, or
        None when the server did not respond with a JSON body.
    """
    print("fetching a cat---")
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "https://cataas.com/cat?json=true",
            # 'Accept' is the standard request header; the original
            # 'accepts' is not a real header and was ignored by the server.
            headers={"Accept": "application/json"},
        ) as response:
            # response.content_type strips any "; charset=..." parameter,
            # so this is robust where an exact header-string match is not.
            if response.content_type == "application/json":
                print("\tFetched a cat")
                return await response.json()
            # Explicit: a non-JSON response yields no cat (caller retries).
            return None
async def main():
    """Collect 22 cat JSON structures with at most 3 concurrent requests.

    Bug fixed: the original loop awaited each task immediately after
    creating it, so requests ran strictly one-at-a-time and the
    semaphore never actually limited anything. Here all tasks are
    scheduled at once and each holds a semaphore slot only while its
    request is in flight, giving true 3-way concurrency.

    Returns:
        list: exactly 22 decoded cat JSON dicts.
    """
    semaphore = asyncio.Semaphore(3)

    async def bounded_fetch():
        # Acquire a slot for the duration of one request; at most 3
        # requests are in flight even though all tasks are scheduled.
        async with semaphore:
            return await fetch_cat()

    cats = []
    # fetch_cat returns None for non-JSON responses, so a round may
    # come up short; keep topping up until we have 22 real cats.
    while len(cats) < 22:
        missing = 22 - len(cats)
        results = await asyncio.gather(
            *(bounded_fetch() for _ in range(missing))
        )
        cats.extend(cat for cat in results if cat is not None)
    return cats
if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger
    # 22 network requests as a side effect.
    cats_all = asyncio.run(main())
    pprint(cats_all)
    print(len(cats_all))
# (chat residue removed from code path; preserved as comments)
# do /u have linkedn?
# nono
# come close