"""Example usage of the crawler package: crawl from a starting URL and
write the collected links out to a file."""
import asyncio

import aiohttp

import crawler

# Crawl parameters: maximum link depth and number of calls.
# Arbitrary integers — tune as needed.
depth, calls = 2, 50


async def main() -> None:
    """Run one crawl session and dump the stored links to disk."""
    # The ClientSession must stay open while the Crawler uses it,
    # hence the async context manager around the whole crawl.
    async with aiohttp.ClientSession() as session:
        c = crawler.Crawler("http://starting.link", session)
        # NOTE(review): `depth` and `calls` are defined above but never
        # passed in — presumably they belong in the Crawler constructor
        # or a crawl call; confirm against the crawler API.
        # Write the stored links, separated by newlines, to "neatfile".
        c.write_out("neatfile")


if __name__ == "__main__":
    # `async with` is only legal inside a coroutine, so the session
    # handling lives in main() and asyncio.run drives it.
    asyncio.run(main())