"""Check every hyperlink in README.md and report broken ones.

Renders the Markdown to HTML, extracts all <a href> targets, then fetches
them concurrently with aiohttp and prints any link whose response status
is not 200 (failed requests report status 0).
"""
import asyncio

import aiohttp
import markdown
from bs4 import BeautifulSoup


def collect_urls(path="README.md"):
    """Return every href found in the rendered Markdown file at *path*."""
    with open(path) as f:
        html_page = markdown.markdown(f.read())
    soup = BeautifulSoup(html_page, features="lxml")
    # find_all replaces the deprecated findAll alias; skip anchors that
    # have no href attribute (e.g. <a name="...">) so we never request None.
    return [href for a in soup.find_all("a") if (href := a.get("href"))]


async def get_url_response(session, url):
    """Fetch *url* with *session* and return ``(status, url)``.

    Best-effort: any failure (DNS error, timeout, bad scheme, ...) is
    reported as status 0 rather than aborting the whole run.
    """
    try:
        async with session.get(url) as resp:
            return (resp.status, url)
    except Exception:
        return (0, url)


async def main():
    """Gather the README's links, check them concurrently, print failures."""
    print("Gathering links...")
    urls = collect_urls()
    timeout = aiohttp.ClientTimeout(total=30)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        tasks = [
            asyncio.ensure_future(get_url_response(session, url)) for url in urls
        ]
        print("Checking links...")
        responses = await asyncio.gather(*tasks)
    for status, url in responses:
        if status != 200:
            print(status, url)


if __name__ == "__main__":
    # asyncio.run() supersedes the deprecated
    # get_event_loop()/run_until_complete() pattern.
    asyncio.run(main())