# Standard-library imports.
import sys
import ssl
import time

# Third-party imports.
import certifi
from pprint import pprint
from bs4 import BeautifulSoup

# Make the project root importable so the `pageobject` package resolves
# when this script is run directly from its own directory.
sys.path.append(sys.path[0] + "/../../")

# Project-local imports (depend on the sys.path tweak above).
from pageobject.locators import locators
from pageobject.helpers import helpers
@@ -25,7 +26,7 @@ async def fetch(url, session):
2526
2627async def scrap_ecommerce (url ):
2728 ssl_context = ssl .create_default_context (cafile = certifi .where ())
28- async with aiohttp .ClientSession (connector = aiohttp .TCPConnector (ssl_context = ssl_context )) as session :
29+ async with aiohttp .ClientSession (connector = aiohttp .TCPConnector (ssl = ssl_context )) as session :
2930 html = await fetch (url , session )
3031 soup = BeautifulSoup (html , 'html.parser' )
3132
@@ -48,6 +49,7 @@ async def scrap_ecommerce(url):
4849 return meta_data_arr
4950
5051async def main ():
52+ start_time = time .time ()
5153 base_url = locators .test_bs4_url
5254 tasks = [scrap_ecommerce (f"{ base_url } &page={ i } " ) for i in range (1 , 6 )]
5355 results = await asyncio .gather (* tasks )
@@ -57,7 +59,8 @@ async def main():
5759 print ("*********************************************************************************************************" )
5860 helpers .print_scrapped_content (result )
5961 print ()
62+
63+ print ("\n Time elapsed is " + str ((time .time () - start_time )) + " seconds" )
6064
if __name__ == '__main__':
    # main() prints its own results and timing and returns None,
    # so there is no value worth capturing from asyncio.run().
    asyncio.run(main())